# Python script to request url and download csv and parse using CSV reader

# Chartboost v3 metrics API endpoints and the local path the report is saved to.
jobID_url = 'https://analytics.chartboost.com/v3/metrics/jobs/'  # download a generated report by appending its job id
campaign_by_apps_url = 'https://analytics.chartboost.com/v3/metrics/campaign?'  # kicks off a campaign-metrics report job
csv_file = 'campaign_by_apps.csv'  # local file the downloaded report is written to

def initialize_api_request(star_date, end_date, appid, api_token, platform):
    global current_platform
    current_platform = platform  

    platform = urllib.quote(platform)
    cleanup()
    print '[*] Initializing API request for chartboost campaign by apps for platform %s' % current_platform
    request_url = campaign_by_apps_url + 'dateMin='+ str(star_date) +'&dateMax=' + str(end_date) + '&groupBy=app&platform=' + platform + '&userId=' + appid +'&userSignature=' + api_token
    try:
        response = urllib2.urlopen(request_url)
    except urllib2.HTTPError as e:
        print '[*] Connection failed because of error code : ' + str(e.code) + ', Retrying Connection...'
        time.sleep(10)
        initialize_api_request(star_date, end_date, appid, api_token, current_platform)
    except urllib2.URLError as e:
        print '[*] Connection failed because of URLError : ' + str(e)
    else:
        response_json = json.loads(response.read())
        print '[*] Job ID successfully obtained!'
        jobID = response_json["jobId"]
        download_csv_file(jobID)

def download_csv_file(jobID):
    # It may take upto 1 minute for the file to be available
    print '[*] Starting download...'
    time.sleep(60)

    csv_dowload_url = jobID_url + jobID
    try:
        response = urllib2.urlopen(csv_dowload_url)
    except urllib2.HTTPError as e:
        print '[*] Download failed because of error code : ' + str(e.code) + ', Retrying download...'
        time.sleep(10)
        cleanup()
        download_csv_file(jobID)
    except urllib2.URLError as e:
        print '[*] Download failed because of URLError : ' + str(e)
    else:
        with open(csv_file, 'wb') as f:
            f.write(response.read())
        print '[*] Download successful!'
        with open(csv_file, 'rb') as csvfile:
            try:
                dialect = csv.Sniffer().sniff(csvfile.read(1024))
                csvfile.seek(0)
                csvfile.close()
                print '[*] CSV file is valid'
                begin_import()
            except Exception, e:   
                print '[*] CSV file is invalid because: ' + str(e) + ', Retrying download...'
                cleanup()
                download_csv_file(jobID)

def cleanup():
    print '[*] Begin cleanup'
    if os.path.isfile(csv_file):
        print '[*] CSV file exist, destroying it'
        os.remove(csv_file)
    else:
        print '[*] CSV not found, attempting to fetch a new one from the server'

def begin_import():
    csv_data = csv.reader(codecs.open(csv_file, 'rb', 'utf-16'), delimiter='\t', quotechar='|')
    column_names = next(csv_data, None) #skip the first row as they are heading
    for row in csv_data:
           # Add to database