import csv
import datetime
import json
import os
import re

import pandas as pd

# Zomato API key: read from the environment — never hard-code it, and never
# print it (the original cell printed the key into the saved notebook output).
zomato_api_key = os.getenv('auth_key', '')

path_to_folder = ""  # destination folder for the raw JSON page dumps

# Base endpoint of the Zomato city-restaurant search API.
API_BASE_URL = 'https://developers.zomato.com/api/v2.1/search'

headers1 = {'user-key': zomato_api_key}  # kept for any later cell that still reads it


def build_search_url(entity_id, start, count=20):
    """Return the search URL for one result page of a city's restaurants.

    entity_id: Zomato city id.
    start:     zero-based result offset of the page.
    count:     page size (the API caps this at 20).
    """
    return (API_BASE_URL
            + '?entity_id=' + str(entity_id)
            + '&entity_type=city&start=' + str(start)
            + '&count=' + str(count))


def fetch_page(entity_id, start):
    """GET one search page and return the parsed JSON dict.

    Returns None when the connection is refused — the original cells silently
    swallowed that error, so a failed page is simply skipped.
    """
    import requests  # local import: the notebook stays importable without requests

    try:
        response = requests.get(build_search_url(entity_id, start),
                                headers={'user-key': zomato_api_key})
        return json.loads(response.text)
    except requests.exceptions.ConnectionError:
        return None


def fetch_city_range(city_ids, start, results):
    """Fetch one page (offset `start`) per city id and append every successful response to `results`."""
    for entity_id in city_ids:
        page = fetch_page(entity_id, start)
        if page is not None:
            results.append(page)


# One accumulator per page offset, matching the original ten copy-pasted cells.
rest_list = []
rest_list1 = []
rest_list2 = []
rest_list3 = []
rest_list4 = []
rest_list5 = []
rest_list6 = []
rest_list7 = []
rest_list8 = []
rest_list9 = []


# Thin wrappers preserve the original cell-level interface (call_1 .. call_10):
# call_1-call_5 page through city ids 1-79, call_6-call_10 through 276-309.
def call_1():
    fetch_city_range(range(1, 80), 101, rest_list)


def call_2():
    fetch_city_range(range(1, 80), 121, rest_list1)


def call_3():
    fetch_city_range(range(1, 80), 141, rest_list2)


def call_4():
    fetch_city_range(range(1, 80), 161, rest_list3)


def call_5():
    fetch_city_range(range(1, 80), 181, rest_list4)


def call_6():
    fetch_city_range(range(276, 310), 101, rest_list5)


def call_7():
    fetch_city_range(range(276, 310), 121, rest_list6)


def call_8():
    fetch_city_range(range(276, 310), 141, rest_list7)


def call_9():
    fetch_city_range(range(276, 310), 161, rest_list8)


def call_10():
    fetch_city_range(range(276, 310), 181, rest_list9)


if zomato_api_key:  # don't issue hundreds of requests with a missing key
    print(fetch_page(40, 131))  # quick sanity check of a single page
    call_1()
    call_2()
    call_3()
    call_4()
    call_5()
    call_6()
    call_7()
    call_8()
    call_9()
def dump_json_batch(batch, filename, folder=None):
    """Write one list of raw API pages to <folder>/<filename>.json.

    batch:    list of parsed search-response dicts.
    filename: file stem without the .json extension.
    folder:   destination directory; defaults to the notebook-level
              `path_to_folder` (backward-compatible with the original cells).
    """
    destination = path_to_folder if folder is None else folder
    path_file = os.path.join(destination, filename + '.json')
    # ensure_ascii=False keeps non-ASCII restaurant names readable in the dump.
    with open(path_file, 'w', encoding='utf-8') as jsonfile:
        json.dump(batch, jsonfile, ensure_ascii=False)


# Run only when the fetch cells above have been executed (keeps this cell
# importable in isolation); behavior in the notebook is unchanged.
if 'call_10' in globals():
    call_10()  # last fetch batch: city ids 276-309, offset 181

    # file11-file20 mirror the ten accumulator lists in order — the original
    # notebook had one copy-pasted dump cell per list.
    batches = [rest_list, rest_list1, rest_list2, rest_list3, rest_list4,
               rest_list5, rest_list6, rest_list7, rest_list8, rest_list9]
    for file_number, batch in zip(range(11, 21), batches):
        dump_json_batch(batch, 'file' + str(file_number))
path_to_folder = "PATH TO FOLDER"   # where the raw file*.json dumps live
path_to_json = "PATH TO JSON"       # root of the per-restaurant output tree

# All raw page dumps produced by the fetch cells; guarded so a fresh run
# without the folder present doesn't crash on os.listdir.
json_files = ([f for f in os.listdir(path_to_folder) if f.endswith('.json')]
              if os.path.isdir(path_to_folder) else [])
print(json_files)


def ensure_dir(directory):
    """Create `directory` (and any missing parents) if it does not exist yet."""
    if not os.path.exists(directory):
        os.makedirs(directory)


# Output folder name per Zomato currency symbol. Replaces the original
# 13-branch if/elif chain.
CURRENCY_FOLDERS = {
    'Rs.': 'Indian Rupees(Rs.)',
    'IDR': 'Indonesian Rupiah(IDR)',
    'P': 'Botswana Pula(P)',
    '$': 'Dollar($)',
    '€': 'Euro(€)',
    '£': 'Pounds(£)',
    'AED': 'Emirati Diram(AED)',
    'LKR': 'Sri Lankan Rupee(LKR)',
    'NZ$': 'NewZealand($)',
    'QR': 'Qatari Rial(QR)',
    'R': 'Rand(R)',
    'R$': 'Brazilian Real(R$)',
    'TL': 'Turkish Lira(TL)',
}


def clean_name(name):
    """Sanitize a restaurant name so it is safe as a filename component.

    Applies the same substitutions, in the same order, as the original cell
    (path separators, a few accented characters, and known problem names),
    then strips surrounding whitespace.
    """
    for old, new in (('/', ' '), ('|', ' '), ('I:ba', 'I-ba'), ('á', 'a'),
                     ('é', 'e'), ('ě', 'e'), ("'s", 's'),
                     ('"Country Store"', 'Country Store'),
                     ('¿Por Que No?', 'Por_Que_No')):
        name = name.replace(old, new)
    return name.strip()


# Re-shard every fetched restaurant into one JSON file per restaurant, laid
# out as <currency>/<city>/<locality>/<rating>/<name>_<id>.json.
for js in json_files:
    with open(os.path.join(path_to_folder, js), 'r', encoding='utf-8',
              errors='ignore') as json_file:
        loaded_file = json.load(json_file, strict=False)  # list of search pages
    for page in loaded_file:
        if 'restaurants' not in page:
            continue  # error responses (bad key, rate limit) carry no data
        for item in page['restaurants']:
            if 'restaurant' not in item:
                continue
            restaurant = item['restaurant']
            if 'dummy' in restaurant['url']:
                continue  # placeholder entries returned by the API
            folder = CURRENCY_FOLDERS.get(restaurant['currency'])
            if folder is None:
                # BUG FIX: the original if/elif chain left `path` holding the
                # previous restaurant's directory for unknown currencies, so
                # the file landed in the wrong folder. Skip instead.
                continue
            city = restaurant['location']['city']
            locality = restaurant['location']['locality'].replace(':', '-')
            rating_text = restaurant['user_rating']['rating_text']
            name_res = clean_name(restaurant['name'])
            res_id = restaurant['R']['res_id']
            target_dir = os.path.join(path_to_json, folder, city, locality,
                                      rating_text).replace(' /', '/')
            ensure_dir(target_dir)
            target = os.path.join(target_dir,
                                  str(name_res) + '_' + str(res_id) + '.json')
            print(target)  # progress: one line per restaurant written
            with open(target, 'w') as out_file:
                json.dump(item, out_file)  # dump the {'restaurant': ...} wrapper
# Human-readable currency names for the CSV column (same mapping the folder
# step uses). Replaces a 13-branch elif chain that also contained a duplicate,
# unreachable 'IDR' branch.
CURRENCY_NAMES = {
    'Rs.': 'Indian Rupees(Rs.)',
    'IDR': 'Indonesian Rupiah(IDR)',
    'P': 'Botswana Pula(P)',
    '$': 'Dollar($)',
    '€': 'Euro(€)',
    '£': 'Pounds(£)',
    'AED': 'Emirati Diram(AED)',
    'LKR': 'Sri Lankan Rupee(LKR)',
    'NZ$': 'NewZealand($)',
    'QR': 'Qatari Rial(QR)',
    'R': 'Rand(R)',
    'R$': 'Brazilian Real(R$)',
    'TL': 'Turkish Lira(TL)',
}

CSV_HEADER = ['Restaurant ID', 'Restaurant Name', 'Country Code', 'City',
              'Address', 'Locality', 'Locality Verbose', 'Longitude',
              'Latitude', 'Cuisines', 'Average Cost for two', 'Currency',
              'Has Table booking', 'Has Online delivery', 'Is delivering now',
              'Switch to order menu', 'Price range', 'Aggregate rating',
              'Rating color', 'Rating text', 'Votes']


def restaurant_row(restaurant):
    """Flatten one restaurant record into a CSV row (CSV_HEADER order).

    Returns None when the currency is not in CURRENCY_NAMES — the original
    elif chain silently skipped those records, and that behavior is kept.
    """
    currency_name = CURRENCY_NAMES.get(restaurant['currency'])
    if currency_name is None:
        return None
    location = restaurant['location']
    rating = restaurant['user_rating']
    return [restaurant['R']['res_id'], restaurant['name'],
            location['country_id'], location['city'], location['address'],
            location['locality'], location['locality_verbose'],
            location['longitude'], location['latitude'],
            restaurant['cuisines'], restaurant['average_cost_for_two'],
            currency_name, restaurant['has_table_booking'],
            restaurant['has_online_delivery'], restaurant['is_delivering_now'],
            restaurant['switch_to_order_menu'], restaurant['price_range'],
            rating['aggregate_rating'], rating['rating_color'],
            rating['rating_text'], rating['votes']]


def export_restaurants_csv(json_root, csv_path):
    """Walk `json_root` for per-restaurant .json files and write them to `csv_path`.

    The original cell opened the CSV handle and never closed it; the `with`
    block fixes that leak.
    """
    with open(csv_path, 'w', encoding='utf-8', newline='') as csvfile:
        writer = csv.writer(csvfile)
        writer.writerow(CSV_HEADER)
        for root, _dirs, files in os.walk(json_root):
            for f in files:
                if not f.endswith('.json'):
                    continue
                with open(os.path.join(root, f), 'r', encoding='utf-8',
                          errors='ignore') as json_file:
                    loaded_file = json.load(json_file, strict=False)
                row = restaurant_row(loaded_file['restaurant'])
                if row is not None:
                    writer.writerow(row)


# Run in-notebook (path_to_json is set by the sharding cell above); skipped
# when this cell is executed standalone.
if 'path_to_json' in globals():
    export_restaurants_csv(path_to_json,
                           os.path.join(path_to_folder, 'zomato_new.csv'))
\n", " | Restaurant ID | \n", "Restaurant Name | \n", "Country Code | \n", "City | \n", "Address | \n", "Locality | \n", "Locality Verbose | \n", "Longitude | \n", "Latitude | \n", "Cuisines | \n", "... | \n", "Currency | \n", "Has Table booking | \n", "Has Online delivery | \n", "Is delivering now | \n", "Switch to order menu | \n", "Price range | \n", "Aggregate rating | \n", "Rating color | \n", "Rating text | \n", "Votes | \n", "
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0 | \n", "6310675 | \n", "Mama Lou's Italian Kitchen | \n", "162 | \n", "Las Piñas City | \n", "Block 1, Lot 36, Tropical Avenue Corner Tropic... | \n", "BF International | \n", "BF International, Las Piñas City | \n", "121.009787 | \n", "14.447615 | \n", "Italian | \n", "... | \n", "Botswana Pula(P) | \n", "1 | \n", "0 | \n", "0 | \n", "0 | \n", "3 | \n", "4.6 | \n", "3F7E00 | \n", "Excellent | \n", "619 | \n", "
1 | \n", "6310675 | \n", "Mama Lou's Italian Kitchen | \n", "162 | \n", "Las Piñas City | \n", "Block 1, Lot 36, Tropical Avenue Corner Tropic... | \n", "BF International | \n", "BF International, Las Piñas City | \n", "121.009787 | \n", "14.447615 | \n", "Italian | \n", "... | \n", "Botswana Pula(P) | \n", "1 | \n", "0 | \n", "0 | \n", "0 | \n", "3 | \n", "4.6 | \n", "3F7E00 | \n", "Excellent | \n", "619 | \n", "
2 | \n", "6314542 | \n", "Blackbird | \n", "162 | \n", "Makati City | \n", "Nielson Tower, Ayala Triangle Gardens, Salcedo... | \n", "Ayala Triangle Gardens, Salcedo Village, Makat... | \n", "Ayala Triangle Gardens, Salcedo Village, Makat... | \n", "121.024562 | \n", "14.556042 | \n", "European, Asian | \n", "... | \n", "Botswana Pula(P) | \n", "0 | \n", "0 | \n", "0 | \n", "0 | \n", "4 | \n", "4.7 | \n", "3F7E00 | \n", "Excellent | \n", "469 | \n", "
3 | \n", "6301293 | \n", "Banapple | \n", "162 | \n", "Makati City | \n", "Ayala Triangle Gardens, Salcedo Village, Makat... | \n", "Ayala Triangle Gardens, Salcedo Village, Makat... | \n", "Ayala Triangle Gardens, Salcedo Village, Makat... | \n", "121.023171 | \n", "14.556196 | \n", "Filipino, American, Italian, Bakery | \n", "... | \n", "Botswana Pula(P) | \n", "0 | \n", "0 | \n", "0 | \n", "0 | \n", "3 | \n", "4.4 | \n", "5BA829 | \n", "Very Good | \n", "867 | \n", "
4 | \n", "6315689 | \n", "Bad Bird | \n", "162 | \n", "Makati City | \n", "Hole In The Wall, Floor 4, Century City Mall, ... | \n", "Century City Mall, Poblacion, Makati City | \n", "Century City Mall, Poblacion, Makati City, Mak... | \n", "121.027708 | \n", "14.565899 | \n", "American | \n", "... | \n", "Botswana Pula(P) | \n", "0 | \n", "0 | \n", "0 | \n", "0 | \n", "3 | \n", "4.4 | \n", "5BA829 | \n", "Very Good | \n", "858 | \n", "
5 rows × 21 columns
\n", "