Commit 8e793c68 authored by Masarath Fatima

Database migration

parent 9812217b
# Default ignored files
/shelf/
/workspace.xml
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
  <component name="NewModuleRootManager">
    <content url="file://$MODULE_DIR$" />
    <orderEntry type="inheritedJdk" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
</module>
\ No newline at end of file
<component name="InspectionProjectProfileManager">
  <settings>
    <option name="USE_PROJECT_PROFILE" value="false" />
    <version value="1.0" />
  </settings>
</component>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.8 (flaskdatabasemigrations)" project-jdk-type="Python SDK" />
  <component name="PyCharmProfessionalAdvertiser">
    <option name="shown" value="true" />
  </component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectModuleManager">
    <modules>
      <module fileurl="file://$PROJECT_DIR$/.idea/flaskdatabasemigrations.iml" filepath="$PROJECT_DIR$/.idea/flaskdatabasemigrations.iml" />
    </modules>
  </component>
</project>
\ No newline at end of file
from flask import Flask, flash, redirect, render_template, \
    request, url_for

app = Flask(__name__)


@app.route('/')
def index():
    return render_template(
        'index.html',
        data=[{'name': 'MY SQL'}, {'name': 'POSTGRES'}, {'name': 'SQL SERVER'}])


@app.route("/test", methods=['GET', 'POST'])
def test():
    select = request.form.get('comp_select')
    return str(select)  # just to see what select is

#
if __name__ == '__main__':
    app.run(debug=True)
    # app.run(host='localhost', port=8800)

# @app.route()
# def getting_sql_data():
#     if validate.forms:
#         if vf == postgres:
#             call your postgres.py file
#         elif vf == mysql:
#             call your mysql.py file
#         elif vf == sqlserver:
#             call your sqlserver.py file
#
#
#
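The commented-out pseudocode above describes dispatching the selected engine to a per-database script. A minimal sketch of that idea, kept standalone and hedged: run_mysql, run_postgres, run_sqlserver and the /getting_sql_data route are illustrative stand-ins, not modules or routes from this commit.

from flask import Flask, request

app = Flask(__name__)

# Placeholder loaders; in the real project these would call the
# MySQL / Postgres / SQL Server migration code.
def run_mysql():
    return "mysql migration started"

def run_postgres():
    return "postgres migration started"

def run_sqlserver():
    return "sqlserver migration started"

LOADERS = {
    'MY SQL': run_mysql,
    'POSTGRES': run_postgres,
    'SQL SERVER': run_sqlserver,
}

@app.route('/getting_sql_data', methods=['POST'])
def getting_sql_data():
    selected = request.form.get('comp_select')
    loader = LOADERS.get(selected)
    if loader is None:
        return 'Unknown selection', 400
    return loader()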
from flask import Flask, app, request, render_template, session
[Server_Credentials]
db_host = localhost
db_user = root
db_password = fatima@1234
[Database]
db_name = db1
table_names = comments_clean_anonimized,votes
[Google]
CREDENTIALS =C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\schemafiles\\flaskdatabasemigratrions-2624cb452581.json
PROJECT_ID = flaskdatabasemigratrions
DATASET_ID = db1
\ No newline at end of file
[Server_Credentials]
db_host = localhost
db_user = root
db_password = fatima@1234
[Database]
db_name = db2
table_names = deliveries,matches
[Google]
CREDENTIALS =C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\schemafiles\\flaskdatabasemigratrions-2624cb452581.json
PROJECT_ID = flaskdatabasemigratrions
DATASET_ID = db2
\ No newline at end of file
[DATABASES]
mysql_db_list = db1
postgres_db_list = northwind2,pagila
sqlserver_db_list = sample_db,original_db
\ No newline at end of file
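main.py (further below) reads this [DATABASES] file with configparser and splits each comma-separated list into the databases it loops over. A minimal sketch of that pattern, assuming the file above is saved locally as db_config.ini; the real project reads it from an absolute path under configure\.

import configparser

config = configparser.ConfigParser()
config.read("db_config.ini")  # illustrative relative path

mysql_dbs = config['DATABASES']['mysql_db_list'].split(',')        # ['db1']
postgres_dbs = config['DATABASES']['postgres_db_list'].split(',')  # ['northwind2', 'pagila']
sqlserver_dbs = config['DATABASES']['sqlserver_db_list'].split(',')  # ['sample_db', 'original_db']

print(mysql_dbs, postgres_dbs, sqlserver_dbs)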
[Server_Credentials]
db_host = localhost
db_user = root
db_password = koti@1234
[Database]
db_name = hr_analytics
table_names = aug_test,aug_train,sample_submission
\ No newline at end of file
[Server_Credentials]
db_host = localhost
db_user = postgres
db_password = 1234
[Database]
db_name = northwind2
table_names = categories,customers,employeeterritories
[Google]
CREDENTIALS =C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\schemafiles\\flaskdatabasemigratrions-2624cb452581.json
PROJECT_ID = flaskdatabasemigratrions
DATASET_ID = northwind2
[Server_Credentials]
db_host = localhost
db_user = root
db_password = fatima@1234
[Database]
db_name =original_db
table_names = commentInteractions
[Google]
CREDENTIALS =C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\schemafiles\\flaskdatabasemigratrions-2624cb452581.json
PROJECT_ID = flaskdatabasemigratrions
DATASET_ID = dboriginal_db2
\ No newline at end of file
[Server_Credentials]
db_host = localhost
db_user = postgres
db_password = 1234
[Database]
db_name = pagila
table_names = actor,category,city,country,film_actor,film_category,inventory
[Google]
CREDENTIALS =C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\schemafiles\\flaskdatabasemigratrions-2624cb452581.json
PROJECT_ID = flaskdatabasemigratrions
DATASET_ID = pagila
[Server_Credentials]
db_host = localhost
db_user = root
db_password = fatima@1234
[Database]
db_name = sample_db
table_names = Test,Train
[Google]
CREDENTIALS =C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\schemafiles\\flaskdatabasemigratrions-2624cb452581.json
PROJECT_ID = flaskdatabasemigratrions
DATASET_ID = sample_db
\ No newline at end of file
import configparser
import os
import pandas_gbq as pd_gbq
import pyodbc
import pandas as pd
import logging
from datetime import date

logdate = date.today()

logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
formatter = logging.Formatter("%(asctime)s:%(levelname)s:%(message)s")
file_handler = logging.FileHandler(f"C:\\Users\\kprudhvee\\PycharmProjects\\Extractor_BD\\RPA_CMPC_DATABASE_MIGRATIONS\\{logdate}_logfilesDATABASE_MIGRATIONS_LOGS.log")
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)


def get_connection(db_server, db_name, db_user, db_pass):
    try:
        logger.info(f"LOCAL SERVER DATABASE CONNECTION STARTED {db_name}")
        conn = pyodbc.connect('DRIVER={SQL Server};SERVER=%s;DATABASE=%s;UID=%s;PWD=%s' % (db_server, db_name, db_user, db_pass))
    except Exception as e:
        logger.exception(e)
    else:
        logger.info(f"SUCCESSFULLY CONNECTED LOCAL SERVER DATABASE {db_name}")
        return conn


def read_and_prepare_data(db_name, table):
    # NOTE: relies on the module-level `conn` created in the loop below.
    try:
        query2 = "SELECT TOP 10 * from {0}.dbo.{1};".format(db_name, table)
        logger.info(f"GETTING TABLE DATA FROM DATABASE {db_name} AND TABLE {table}")
        df = pd.read_sql(query2, conn)
        # Rename accented / bracketed column names so they are valid BigQuery field names.
        df = df.rename(columns={"Descripción": "Descripcion", "Mes_Año": "Mes_Ano", "[3PL]": "3PL", "[3PLAlexandria]": "3PLAlexandria", "[3PLBoise]": "3PLBoise", "[3PLPlantas]": "3PLPlantas", "[3PLProductos]": "3PLProductos", "[3PLReportConsolidado]": "3PLReportConsolidado", "[3PLSKU]": "3PLSKU", "[3PLWoodgrain]": "3PLWoodgrain", "DesviaciónDiasEntrega": "DesviacionDiasEntrega", "DesviaciónDiasZarpe": "DesviacionDiasZarpe", "DesviaciónDiasProduccion": "DesviacionDiasProduccion", "DesviaciónDiasTransitoEnAgua": "DesviacionDiasTransitoEnAgua"}, inplace=False)
    except Exception as e:
        logger.exception(e)
    else:
        logger.info(f"SUCCESSFULLY GOT TABLE DATA FROM DATABASE {db_name} AND TABLE {table}")
        return df


def load_into_bq(df, table_id, project_id, schema_json):
    try:
        logger.info(f"LOADING TABLE DATA TO BIGQUERY TABLE_ID {table_id}")
        pd_gbq.to_gbq(df, table_id,
                      project_id=project_id,
                      table_schema=schema_json,
                      if_exists='replace')
        logger.info(f"SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID {table_id}")
    except Exception as e:
        logger.exception(e)


### CREATING ONE CONFIG OBJECT FOR READING EVERY CONFIG FILE
config = configparser.ConfigParser()

### reading the databases list config file
# NOTE: configparser.read() silently skips missing files, so this except rarely fires.
try:
    config.read("C:\\Users\\kprudhvee\\PycharmProjects\\Extractor_BD\\RPA_CMPC_DATABASE_MIGRATIONS\\config_folders\\CONFIG_DATABASES.ini")
except FileNotFoundError as e:
    logger.exception(e)
databases = config['DATABASES']['databases']
databases_list = databases.split(",")

for database in databases_list:
    logger.info(f"started {database}")
    logger.info("========================================================================")
    try:
        logger.info(f"GETTING CONFIGURATION FILE FOR DATABASE {database}")
        config.read(f'C:\\Users\\kprudhvee\\PycharmProjects\\Extractor_BD\\RPA_CMPC_DATABASE_MIGRATIONS\\config_folders\\CONFIG_{database}.ini')
        logger.info(f"SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE {database}")
    except FileNotFoundError as e:
        logger.exception(e)
    ### READING THE SCHEMA CSV FILE AND CONVERTING IT TO A DATAFRAME
    try:
        logger.info(f"GETTING SCHEMA FILE FOR DATABASE {database}")
        database_config_df = pd.read_csv(f"C:\\Users\\kprudhvee\\PycharmProjects\\Extractor_BD\\RPA_CMPC_DATABASE_MIGRATIONS\\data_files\\{database}.CSV")
        logger.info(f"SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE {database}")
    except FileNotFoundError as e:
        logger.exception(e)
    db_user = config['Server_Credentials']['DB_USER']
    db_pass = config['Server_Credentials']['DB_PASSWORD']
    db_server = config['Server_Credentials']['DB_HOST']
    project_id = config['GOOGLE']['PROJECT_ID']
    dataset_id = config['GOOGLE']['DATASET_ID']
    os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = config['GOOGLE']['CREDENTIALS']
    db_name = config['Database']['db_name']
    table_names = config['Database']['table_names']
    table_names_list = table_names.split(",")
    conn = get_connection(db_server, db_name, db_user, db_pass)
    cursor = conn.cursor()
    for table in table_names_list:
        print(table)
        df = read_and_prepare_data(db_name, table)
        # CONVERTING THE SCHEMA DATAFRAME INTO A LIST OF FIELD DICTS
        get_unique_columns = pd.unique(database_config_df['table'])
        demo_df = database_config_df[database_config_df['table'] == table]
        schema_df = demo_df[["name", "type", "mode"]]
        schema_json = schema_df.to_dict('records')
        # WRITING TO BIGQUERY
        table_id = '{0}.{1}'.format(dataset_id, table)
        load_into_bq(df, table_id, project_id, schema_json)
        logger.info("========================================================================")
\ No newline at end of file
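The script above expects each data_files\{database}.CSV to carry at least the columns table, name, type and mode, which it turns into the table_schema list that pandas-gbq accepts. A small illustrative sketch of that conversion; the schema rows below are invented for the example and are not taken from the project files.

import pandas as pd

# Illustrative schema rows in the same shape as the {database}.CSV files.
schema_csv = pd.DataFrame([
    {"table": "Test", "name": "id", "type": "INTEGER", "mode": "NULLABLE"},
    {"table": "Test", "name": "name", "type": "STRING", "mode": "NULLABLE"},
    {"table": "Train", "name": "id", "type": "INTEGER", "mode": "NULLABLE"},
])

table = "Test"
table_rows = schema_csv[schema_csv["table"] == table]
table_schema = table_rows[["name", "type", "mode"]].to_dict("records")
# -> [{'name': 'id', 'type': 'INTEGER', 'mode': 'NULLABLE'},
#     {'name': 'name', 'type': 'STRING', 'mode': 'NULLABLE'}]
print(table_schema)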
from flask import Flask, flash, redirect, render_template, \
    request, url_for
from onpremisesdatabases.mysql1 import MSQLPOOl
from onpremisesdatabases.postgres1 import POSTGRESPOOL
from onpremisesdatabases.sqlserver1 import SQLSERVERPOOL
import configparser
import pandas as pd
import os
from google.cloud import bigquery
from google.cloud.exceptions import NotFound
import pickle

app = Flask(__name__)


@app.route('/')
def index():
    return render_template(
        'index.html',
        data=[{'name': 'MY SQL'}, {'name': 'POSTGRES'}, {'name': 'SQL SERVER'}])


@app.route("/test", methods=['GET', 'POST'])
def test():
    config = configparser.ConfigParser()
    select = request.form.get('comp_select')
    list1 = []
    if select == "MY SQL":
        mysqlpool = MSQLPOOl()
        try:
            config.read("C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\configure\\db_config.ini")
        except FileNotFoundError as e:
            print(e)
        databases = config['DATABASES']['mysql_db_list']
        db_lists = databases.split(",")  # e.g. ['db1', 'db2']
        for db in db_lists:
            try:
                config.read(f"C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\configure\\{db}_config.ini")
            except FileNotFoundError as e:
                print(e)
            try:
                schema_df = pd.read_csv(f"C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\schemafiles\\{db}_config.csv")
            except FileNotFoundError as e:
                print(e)
            db_user = config['Server_Credentials']['db_user']
            db_pass = config['Server_Credentials']['db_password']
            db_server = config['Server_Credentials']['db_host']
            db_name = config['Database']['db_name']
            table_name = config['Database']['table_names']
            table_names_list = table_name.split(",")
            print(table_names_list)
            conn = mysqlpool.get_connection(db_server, db_user, db_pass)
            cursor = conn.cursor()
            len_table = len(table_names_list)
            # list1 = []
            for table in table_names_list:
                print(table)
                df = mysqlpool.read_and_prepare_data(db_name, table, conn)
                project_id = config['Google']['PROJECT_ID']
                dataset_id = config['Google']['DATASET_ID']
                os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = config['Google']['CREDENTIALS']
                get_unique_columns = pd.unique(schema_df['table'])
                demo_df = schema_df[schema_df['table'] == table]
                schema_df1 = demo_df[['name', 'type', 'mode']]
                schema_json = schema_df1.to_dict('records')
                table_id = '{0}.{1}'.format(dataset_id, table)
                mysqlpool.load_into_bq(df, table_id, project_id, schema_json)
                print(f"loaded successfully {table}")
                # status = True
                # Confirm the table actually landed in BigQuery.
                client = bigquery.Client()
                try:
                    client.get_table(table_id)
                    status = True
                except NotFound:
                    status = False
                list = [select, db_name, table, status]
                list1.append(list)
                print(list1)
        filename = 'my_data'
        outfile = open(filename, 'wb')
        pickle.dump(list1, outfile)
        outfile.close()
    elif select == "POSTGRES":
        postgrespool = POSTGRESPOOL()
        try:
            config.read("C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\configure\\db_config.ini")
        except FileNotFoundError as e:
            print(e)
        databases = config['DATABASES']['postgres_db_list']
        db_lists = databases.split(",")
        for db in db_lists:
            try:
                config.read(f"C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\configure\\{db}_config.ini")
            except FileNotFoundError as e:
                print(e)
            try:
                schema_df = pd.read_csv(
                    f"C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\schemafiles\\{db}_config.csv")
            except FileNotFoundError as e:
                print(e)
            db_user = config['Server_Credentials']['db_user']
            db_pass = config['Server_Credentials']['db_password']
            db_server = config['Server_Credentials']['db_host']
            db_name = config['Database']['db_name']
            table_name = config['Database']['table_names']
            table_names_list = table_name.split(",")
            conn = postgrespool.get_connection(db_server, db_user, db_pass, db_name)
            cursor = conn.cursor()
            for table in table_names_list:
                print(table)
                df = postgrespool.read_and_prepare_data(table, conn)
                project_id = config['Google']['PROJECT_ID']
                dataset_id = config['Google']['DATASET_ID']
                os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = config['Google']['CREDENTIALS']
                get_unique_columns = pd.unique(schema_df['table'])
                demo_df = schema_df[schema_df['table'] == table]
                schema_df1 = demo_df[['name', 'type', 'mode']]
                schema_json = schema_df1.to_dict('records')
                table_id = '{0}.{1}'.format(dataset_id, table)
                postgrespool.load_into_bq(df, table_id, project_id, schema_json)
                print(f"loaded successfully {table}")
                client = bigquery.Client()
                try:
                    client.get_table(table_id)
                    status = True
                except NotFound:
                    status = False
                list = [select, db_name, table, status]
                list1.append(list)
                print(list1)
        filename = 'my_data'
        outfile = open(filename, 'wb')
        pickle.dump(list1, outfile)
        outfile.close()
    elif select == "SQL SERVER":
        sqlserverpool = SQLSERVERPOOL()
        try:
            config.read("C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\configure\\db_config.ini")
        except FileNotFoundError as e:
            print(e)
        databases = config['DATABASES']['sqlserver_db_list']
        db_lists = databases.split(",")
        for db in db_lists:
            try:
                config.read(f"C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\configure\\{db}_config.ini")
            except FileNotFoundError as e:
                print(e)
            try:
                schema_df = pd.read_csv(
                    f"C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\schemafiles\\{db}_config.csv")
            except FileNotFoundError as e:
                print(e)
            db_name = config['Database']['db_name']
            table_name = config['Database']['table_names']
            table_names_list = table_name.split(",")
            conn = sqlserverpool.get_connection(db_name)
            cursor = conn.cursor()
            for table in table_names_list:
                print(table)
                df = sqlserverpool.read_and_prepare_data(db_name, table, conn)
                project_id = config['Google']['PROJECT_ID']
                dataset_id = config['Google']['DATASET_ID']
                os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = config['Google']['CREDENTIALS']
                get_unique_columns = pd.unique(schema_df['table'])
                demo_df = schema_df[schema_df['table'] == table]
                schema_df1 = demo_df[['name', 'type', 'mode']]
                schema_json = schema_df1.to_dict('records')
                table_id = '{0}.{1}'.format(dataset_id, table)
                sqlserverpool.load_into_bq(df, table_id, project_id, schema_json)
                print(f"loaded successfully {table}")
                client = bigquery.Client()
                try:
                    client.get_table(table_id)
                    status = True
                except NotFound:
                    status = False
                list = [select, db_name, table, status]
                list1.append(list)
                print(list1)
        filename = 'my_data'
        outfile = open(filename, 'wb')
        pickle.dump(list1, outfile)
        outfile.close()
    return redirect(url_for("thankyou", items=list1))


@app.route('/thankyou', methods=['GET', 'POST'])
def thankyou():
    infile = open("my_data", 'rb')
    list = pickle.load(infile)
    infile.close()
    return render_template("thankyou.html", items=list)


if __name__ == '__main__':
    app.run(debug=True)
\ No newline at end of file
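main.py verifies each load by asking BigQuery for the table and treating NotFound as failure. A minimal standalone sketch of that check; the helper name table_exists is illustrative and not part of the project.

from google.cloud import bigquery
from google.cloud.exceptions import NotFound

def table_exists(client: bigquery.Client, table_id: str) -> bool:
    """Return True if table_id (dataset.table or project.dataset.table)
    exists in BigQuery, mirroring the status check used in main.py."""
    try:
        client.get_table(table_id)
        return True
    except NotFound:
        return False

# Usage (requires GOOGLE_APPLICATION_CREDENTIALS to be set, as main.py does):
# client = bigquery.Client()
# print(table_exists(client, "sample_db.Test"))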
from wtforms import SelectField, SubmitField
from flask_wtf import FlaskForm
from wtforms.validators import DataRequired


class DBForm(FlaskForm):
    DB_NAMES = SelectField('DBNAMES', choices=[('My SQL', 'My SQL'), ('SQL Server', 'SQL Server'), ('Postgres', 'Postgres')])
    # subcategory = SelectField('Sub Category', choices=[('USPA', 'USPA'), ('LEE', 'LEE'), ('FOSSIL', 'FOSSIL'), ('TITAN', 'TITAN')])
    submit = SubmitField('SUBMIT', validators=[DataRequired()])
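DBForm is not wired into main.py, which reads the raw comp_select field instead. A minimal sketch of how the form could back a view, assuming an illustrative /choose route and SECRET_KEY; the class is repeated here only so the sketch runs on its own.

from flask import Flask, render_template_string
from flask_wtf import FlaskForm
from wtforms import SelectField, SubmitField

app = Flask(__name__)
app.config['SECRET_KEY'] = 'change-me'  # required by Flask-WTF for CSRF protection

class DBForm(FlaskForm):
    DB_NAMES = SelectField('DBNAMES', choices=[('My SQL', 'My SQL'),
                                               ('SQL Server', 'SQL Server'),
                                               ('Postgres', 'Postgres')])
    submit = SubmitField('SUBMIT')

@app.route('/choose', methods=['GET', 'POST'])
def choose():
    form = DBForm()
    if form.validate_on_submit():
        return f"selected: {form.DB_NAMES.data}"
    # Inline template only to keep the sketch self-contained; the project
    # would normally render a template file instead.
    return render_template_string(
        "<form method='post'>{{ form.hidden_tag() }}"
        "{{ form.DB_NAMES() }}{{ form.submit() }}</form>", form=form)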
2022-01-12 14:03:14,949:INFO:started northwind2
2022-01-12 14:03:14,949:INFO:========================================================================
2022-01-12 14:03:14,949:INFO:GETTING CONFIGURATION FILE FOR DATABASE northwind2
2022-01-12 14:03:14,949:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE northwind2
2022-01-12 14:03:14,953:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE northwind2
2022-01-12 14:03:14,953:INFO:GETTING SCHEMA FILE FOR DATABASE northwind2
2022-01-12 14:03:14,962:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE northwind2
2022-01-12 14:03:15,124:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE northwind2 AND TABLE categories
2022-01-12 14:03:25,628:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID northwind2.categories
2022-01-12 14:03:25,629:INFO:========================================================================
2022-01-12 14:03:25,659:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE northwind2 AND TABLE customers
2022-01-12 14:03:34,024:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID northwind2.customers
2022-01-12 14:03:34,025:INFO:========================================================================
2022-01-12 14:03:34,045:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE northwind2 AND TABLE employeeterritories
2022-01-12 14:03:45,344:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID northwind2.employeeterritories
2022-01-12 14:03:45,345:INFO:========================================================================
2022-01-12 14:03:45,345:INFO:started pagila
2022-01-12 14:03:45,345:INFO:========================================================================
2022-01-12 14:03:45,346:INFO:GETTING CONFIGURATION FILE FOR DATABASE pagila
2022-01-12 14:03:45,346:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE pagila
2022-01-12 14:03:45,351:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE pagila
2022-01-12 14:03:45,352:INFO:GETTING SCHEMA FILE FOR DATABASE pagila
2022-01-12 14:03:45,368:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE pagila
2022-01-12 14:03:45,652:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE actor
2022-01-12 14:03:56,560:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.actor
2022-01-12 14:03:56,560:INFO:========================================================================
2022-01-12 14:03:56,572:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE category
2022-01-12 14:04:06,777:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.category
2022-01-12 14:04:06,777:INFO:========================================================================
2022-01-12 14:04:06,789:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE city
2022-01-12 14:04:18,465:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.city
2022-01-12 14:04:18,465:INFO:========================================================================
2022-01-12 14:04:18,479:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE country
2022-01-12 14:04:27,211:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.country
2022-01-12 14:04:27,211:INFO:========================================================================
2022-01-12 14:04:27,322:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE film_actor
2022-01-12 14:04:36,077:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.film_actor
2022-01-12 14:04:36,077:INFO:========================================================================
2022-01-12 14:04:36,110:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE film_category
2022-01-12 14:04:45,495:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.film_category
2022-01-12 14:04:45,496:INFO:========================================================================
2022-01-12 14:04:45,528:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE inventory
2022-01-12 14:05:01,519:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.inventory
2022-01-12 14:05:01,519:INFO:========================================================================
2022-01-12 14:09:17,085:INFO:started sample_db
2022-01-12 14:09:17,085:INFO:========================================================================
2022-01-12 14:09:17,085:INFO:GETTING CONFIGURATION FILE FOR DATABASE sample_db
2022-01-12 14:09:17,085:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE sample_db
2022-01-12 14:09:17,088:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE sample_db
2022-01-12 14:09:17,089:INFO:GETTING SCHEMA FILE FOR DATABASE sample_db
2022-01-12 14:09:17,095:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE sample_db
2022-01-12 14:09:18,137:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE sample_db AND TABLE Test
2022-01-12 14:09:30,374:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID sample_db.Test
2022-01-12 14:09:32,562:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE sample_db AND TABLE Train
2022-01-12 14:09:51,779:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID sample_db.Train
2022-01-12 14:09:51,779:INFO:========================================================================
2022-01-12 14:09:51,779:INFO:started original_db
2022-01-12 14:09:51,780:INFO:========================================================================
2022-01-12 14:09:51,780:INFO:GETTING CONFIGURATION FILE FOR DATABASE original_db
2022-01-12 14:09:51,781:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE original_db
2022-01-12 14:09:51,790:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE original_db
2022-01-12 14:09:51,790:INFO:GETTING SCHEMA FILE FOR DATABASE original_db
2022-01-12 14:09:51,809:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE original_db
2022-01-12 14:09:53,802:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE original_db AND TABLE commentInteractions
2022-01-12 14:10:05,802:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID dboriginal_db2.commentInteractions
2022-01-12 14:10:05,802:INFO:========================================================================
2022-01-12 14:19:57,429:INFO:started db1
2022-01-12 14:19:57,430:INFO:========================================================================
2022-01-12 14:19:57,430:INFO:GETTING CONFIGURATION FILE FOR DATABASE db1
2022-01-12 14:19:57,430:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db1
2022-01-12 14:19:57,433:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db1
2022-01-12 14:19:57,433:INFO:GETTING SCHEMA FILE FOR DATABASE db1
2022-01-12 14:19:57,441:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE db1
2022-01-12 14:19:59,083:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db1 AND TABLE comments_clean_anonimized
2022-01-12 14:20:10,472:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db1.comments_clean_anonimized
2022-01-12 14:20:10,473:INFO:========================================================================
2022-01-12 14:20:53,157:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db1 AND TABLE votes
2022-01-12 14:21:04,127:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db1.votes
2022-01-12 14:21:04,128:INFO:========================================================================
2022-01-12 14:21:04,128:INFO:started db2
2022-01-12 14:21:04,128:INFO:========================================================================
2022-01-12 14:21:04,128:INFO:GETTING CONFIGURATION FILE FOR DATABASE db2
2022-01-12 14:21:04,129:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db2
2022-01-12 14:21:04,139:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db2
2022-01-12 14:21:04,139:INFO:GETTING SCHEMA FILE FOR DATABASE db2
2022-01-12 14:21:04,163:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE db2
2022-01-12 14:21:11,928:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db2 AND TABLE deliveries
2022-01-12 14:21:20,612:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db2.deliveries
2022-01-12 14:21:20,612:INFO:========================================================================
2022-01-12 14:21:21,119:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db2 AND TABLE matches
2022-01-12 14:21:33,315:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db2.matches
2022-01-12 14:21:33,316:INFO:========================================================================
2022-01-13 16:45:25,905:INFO:started sample_db
2022-01-13 16:45:25,905:INFO:========================================================================
2022-01-13 16:45:25,905:INFO:GETTING CONFIGURATION FILE FOR DATABASE sample_db
2022-01-13 16:45:25,905:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE sample_db
2022-01-13 16:45:25,906:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE sample_db
2022-01-13 16:45:25,907:INFO:GETTING SCHEMA FILE FOR DATABASE sample_db
2022-01-13 16:45:25,910:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE sample_db
2022-01-13 16:45:26,850:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE sample_db AND TABLE Test
2022-01-13 16:45:37,103:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID sample_db.Test
2022-01-13 16:45:39,114:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE sample_db AND TABLE Train
2022-01-13 16:45:56,487:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID sample_db.Train
2022-01-13 16:45:56,487:INFO:========================================================================
2022-01-13 16:45:56,487:INFO:started original_db
2022-01-13 16:45:56,487:INFO:========================================================================
2022-01-13 16:45:56,487:INFO:GETTING CONFIGURATION FILE FOR DATABASE original_db
2022-01-13 16:45:56,487:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE original_db
2022-01-13 16:45:56,489:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE original_db
2022-01-13 16:45:56,490:INFO:GETTING SCHEMA FILE FOR DATABASE original_db
2022-01-13 16:45:56,494:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE original_db
2022-01-13 16:45:58,398:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE original_db AND TABLE commentInteractions
2022-01-13 16:46:14,270:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID dboriginal_db2.commentInteractions
2022-01-13 16:46:14,270:INFO:========================================================================
2022-01-13 16:46:14,334:INFO:started db1
2022-01-13 16:46:14,334:INFO:========================================================================
2022-01-13 16:46:14,334:INFO:GETTING CONFIGURATION FILE FOR DATABASE db1
2022-01-13 16:46:14,334:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db1
2022-01-13 16:46:14,336:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db1
2022-01-13 16:46:14,336:INFO:GETTING SCHEMA FILE FOR DATABASE db1
2022-01-13 16:46:14,339:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE db1
2022-01-13 16:46:14,769:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db1 AND TABLE comments_clean_anonimized
2022-01-13 16:46:29,152:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db1.comments_clean_anonimized
2022-01-13 16:46:29,153:INFO:========================================================================
2022-01-13 16:47:11,219:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db1 AND TABLE votes
2022-01-13 16:47:23,408:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db1.votes
2022-01-13 16:47:23,409:INFO:========================================================================
2022-01-13 16:47:23,409:INFO:started db2
2022-01-13 16:47:23,409:INFO:========================================================================
2022-01-13 16:47:23,409:INFO:GETTING CONFIGURATION FILE FOR DATABASE db2
2022-01-13 16:47:23,409:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db2
2022-01-13 16:47:23,412:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db2
2022-01-13 16:47:23,412:INFO:GETTING SCHEMA FILE FOR DATABASE db2
2022-01-13 16:47:23,419:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE db2
2022-01-13 16:47:25,813:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db2 AND TABLE deliveries
2022-01-13 16:47:36,848:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db2.deliveries
2022-01-13 16:47:36,848:INFO:========================================================================
2022-01-13 16:47:37,030:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db2 AND TABLE matches
2022-01-13 16:47:46,351:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db2.matches
2022-01-13 16:47:46,351:INFO:========================================================================
2022-01-13 16:47:46,399:INFO:started northwind2
2022-01-13 16:47:46,399:INFO:========================================================================
2022-01-13 16:47:46,399:INFO:GETTING CONFIGURATION FILE FOR DATABASE northwind2
2022-01-13 16:47:46,400:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE northwind2
2022-01-13 16:47:46,401:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE northwind2
2022-01-13 16:47:46,401:INFO:GETTING SCHEMA FILE FOR DATABASE northwind2
2022-01-13 16:47:46,404:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE northwind2
2022-01-13 16:47:46,553:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE northwind2 AND TABLE categories
2022-01-13 16:47:55,969:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID northwind2.categories
2022-01-13 16:47:55,969:INFO:========================================================================
2022-01-13 16:47:55,984:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE northwind2 AND TABLE customers
2022-01-13 16:48:04,326:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID northwind2.customers
2022-01-13 16:48:04,326:INFO:========================================================================
2022-01-13 16:48:04,331:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE northwind2 AND TABLE employeeterritories
2022-01-13 16:48:12,329:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID northwind2.employeeterritories
2022-01-13 16:48:12,330:INFO:========================================================================
2022-01-13 16:48:12,331:INFO:started pagila
2022-01-13 16:48:12,331:INFO:========================================================================
2022-01-13 16:48:12,331:INFO:GETTING CONFIGURATION FILE FOR DATABASE pagila
2022-01-13 16:48:12,332:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE pagila
2022-01-13 16:48:12,335:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE pagila
2022-01-13 16:48:12,336:INFO:GETTING SCHEMA FILE FOR DATABASE pagila
2022-01-13 16:48:12,347:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE pagila
2022-01-13 16:48:12,518:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE actor
2022-01-13 16:48:21,202:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.actor
2022-01-13 16:48:21,202:INFO:========================================================================
2022-01-13 16:48:21,210:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE category
2022-01-13 16:48:32,508:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.category
2022-01-13 16:48:32,508:INFO:========================================================================
2022-01-13 16:48:32,520:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE city
2022-01-13 16:48:40,351:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.city
2022-01-13 16:48:40,351:INFO:========================================================================
2022-01-13 16:48:40,355:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE country
2022-01-13 16:48:49,213:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.country
2022-01-13 16:48:49,213:INFO:========================================================================
2022-01-13 16:48:49,247:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE film_actor
2022-01-13 16:49:00,374:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.film_actor
2022-01-13 16:49:00,375:INFO:========================================================================
2022-01-13 16:49:00,382:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE film_category
2022-01-13 16:49:08,720:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.film_category
2022-01-13 16:49:08,720:INFO:========================================================================
2022-01-13 16:49:08,759:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE inventory
2022-01-13 16:49:22,343:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.inventory
2022-01-13 16:49:22,344:INFO:========================================================================
2022-01-13 16:49:25,949:INFO:started sample_db
2022-01-13 16:49:25,950:INFO:========================================================================
2022-01-13 16:49:25,950:INFO:GETTING CONFIGURATION FILE FOR DATABASE sample_db
2022-01-13 16:49:25,950:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE sample_db
2022-01-13 16:49:25,952:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE sample_db
2022-01-13 16:49:25,952:INFO:GETTING SCHEMA FILE FOR DATABASE sample_db
2022-01-13 16:49:25,955:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE sample_db
2022-01-13 16:49:27,082:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE sample_db AND TABLE Test
2022-01-13 16:49:39,627:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID sample_db.Test
2022-01-13 16:49:41,959:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE sample_db AND TABLE Train
2022-01-13 16:49:55,427:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID sample_db.Train
2022-01-13 16:49:55,427:INFO:========================================================================
2022-01-13 16:49:55,427:INFO:started original_db
2022-01-13 16:49:55,427:INFO:========================================================================
2022-01-13 16:49:55,427:INFO:GETTING CONFIGURATION FILE FOR DATABASE original_db
2022-01-13 16:49:55,427:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE original_db
2022-01-13 16:49:55,429:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE original_db
2022-01-13 16:49:55,429:INFO:GETTING SCHEMA FILE FOR DATABASE original_db
2022-01-13 16:49:55,433:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE original_db
2022-01-13 16:49:57,343:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE original_db AND TABLE commentInteractions
2022-01-13 16:50:08,647:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID dboriginal_db2.commentInteractions
2022-01-13 16:50:08,647:INFO:========================================================================
2022-01-13 16:50:08,731:INFO:started db1
2022-01-13 16:50:08,732:INFO:========================================================================
2022-01-13 16:50:08,732:INFO:GETTING CONFIGURATION FILE FOR DATABASE db1
2022-01-13 16:50:08,732:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db1
2022-01-13 16:50:08,734:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db1
2022-01-13 16:50:08,734:INFO:GETTING SCHEMA FILE FOR DATABASE db1
2022-01-13 16:50:08,738:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE db1
2022-01-13 16:50:09,238:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db1 AND TABLE comments_clean_anonimized
2022-01-13 16:50:17,677:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db1.comments_clean_anonimized
2022-01-13 16:50:17,677:INFO:========================================================================
2022-01-13 16:50:58,098:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db1 AND TABLE votes
2022-01-13 16:51:07,543:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db1.votes
2022-01-13 16:51:07,543:INFO:========================================================================
2022-01-13 16:51:07,543:INFO:started db2
2022-01-13 16:51:07,544:INFO:========================================================================
2022-01-13 16:51:07,544:INFO:GETTING CONFIGURATION FILE FOR DATABASE db2
2022-01-13 16:51:07,544:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db2
2022-01-13 16:51:07,546:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db2
2022-01-13 16:51:07,547:INFO:GETTING SCHEMA FILE FOR DATABASE db2
2022-01-13 16:51:07,551:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE db2
2022-01-13 16:51:09,714:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db2 AND TABLE deliveries
2022-01-13 16:51:17,708:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db2.deliveries
2022-01-13 16:51:17,708:INFO:========================================================================
2022-01-13 16:51:17,917:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db2 AND TABLE matches
2022-01-13 16:51:25,433:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db2.matches
2022-01-13 16:51:25,434:INFO:========================================================================
2022-01-13 16:51:25,476:INFO:started northwind2
2022-01-13 16:51:25,476:INFO:========================================================================
2022-01-13 16:51:25,476:INFO:GETTING CONFIGURATION FILE FOR DATABASE northwind2
2022-01-13 16:51:25,476:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE northwind2
2022-01-13 16:51:25,478:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE northwind2
2022-01-13 16:51:25,478:INFO:GETTING SCHEMA FILE FOR DATABASE northwind2
2022-01-13 16:51:25,483:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE northwind2
2022-01-13 16:51:25,680:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE northwind2 AND TABLE categories
2022-01-13 16:51:34,294:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID northwind2.categories
2022-01-13 16:51:34,295:INFO:========================================================================
2022-01-13 16:51:34,314:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE northwind2 AND TABLE customers
2022-01-13 16:51:42,817:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID northwind2.customers
2022-01-13 16:51:42,817:INFO:========================================================================
2022-01-13 16:51:42,823:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE northwind2 AND TABLE employeeterritories
2022-01-13 16:51:51,801:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID northwind2.employeeterritories
2022-01-13 16:51:51,801:INFO:========================================================================
2022-01-13 16:51:51,801:INFO:started pagila
2022-01-13 16:51:51,802:INFO:========================================================================
2022-01-13 16:51:51,802:INFO:GETTING CONFIGURATION FILE FOR DATABASE pagila
2022-01-13 16:51:51,802:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE pagila
2022-01-13 16:51:51,804:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE pagila
2022-01-13 16:51:51,804:INFO:GETTING SCHEMA FILE FOR DATABASE pagila
2022-01-13 16:51:51,811:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE pagila
2022-01-13 16:51:51,985:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE actor
2022-01-13 16:52:01,668:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.actor
2022-01-13 16:52:01,668:INFO:========================================================================
2022-01-13 16:52:01,675:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE category
2022-01-13 16:52:19,444:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.category
2022-01-13 16:52:19,445:INFO:========================================================================
2022-01-13 16:52:19,456:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE city
2022-01-13 16:52:28,541:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.city
2022-01-13 16:52:28,542:INFO:========================================================================
2022-01-13 16:52:28,553:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE country
2022-01-13 16:52:38,797:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.country
2022-01-13 16:52:38,797:INFO:========================================================================
2022-01-13 16:52:38,838:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE film_actor
2022-01-13 16:52:48,696:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.film_actor
2022-01-13 16:52:48,696:INFO:========================================================================
2022-01-13 16:52:48,704:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE film_category
2022-01-13 16:52:58,491:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.film_category
2022-01-13 16:52:58,492:INFO:========================================================================
2022-01-13 16:52:58,511:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE inventory
2022-01-13 16:53:10,172:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.inventory
2022-01-13 16:53:10,173:INFO:========================================================================
2022-01-13 16:59:23,242:INFO:started sample_db
2022-01-13 16:59:23,243:INFO:========================================================================
2022-01-13 16:59:23,243:INFO:GETTING CONFIGURATION FILE FOR DATABASE sample_db
2022-01-13 16:59:23,243:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE sample_db
2022-01-13 16:59:23,245:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE sample_db
2022-01-13 16:59:23,245:INFO:GETTING SCHEMA FILE FOR DATABASE sample_db
2022-01-13 16:59:23,249:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE sample_db
2022-01-13 16:59:24,320:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE sample_db AND TABLE Test
2022-01-13 16:59:37,352:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID sample_db.Test
2022-01-13 16:59:39,415:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE sample_db AND TABLE Train
2022-01-13 16:59:53,144:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID sample_db.Train
2022-01-13 16:59:53,144:INFO:========================================================================
2022-01-13 16:59:53,145:INFO:started original_db
2022-01-13 16:59:53,146:INFO:========================================================================
2022-01-13 16:59:53,146:INFO:GETTING CONFIGURATION FILE FOR DATABASE original_db
2022-01-13 16:59:53,146:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE original_db
2022-01-13 16:59:53,148:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE original_db
2022-01-13 16:59:53,148:INFO:GETTING SCHEMA FILE FOR DATABASE original_db
2022-01-13 16:59:53,157:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE original_db
2022-01-13 16:59:55,039:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE original_db AND TABLE commentInteractions
2022-01-13 17:00:05,937:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID dboriginal_db2.commentInteractions
2022-01-13 17:00:05,937:INFO:========================================================================
2022-01-13 17:00:05,999:INFO:started db1
2022-01-13 17:00:05,999:INFO:========================================================================
2022-01-13 17:00:05,999:INFO:GETTING CONFIGURATION FILE FOR DATABASE db1
2022-01-13 17:00:05,999:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db1
2022-01-13 17:00:06,001:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db1
2022-01-13 17:00:06,001:INFO:GETTING SCHEMA FILE FOR DATABASE db1
2022-01-13 17:00:06,005:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE db1
2022-01-13 17:00:06,465:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db1 AND TABLE comments_clean_anonimized
2022-01-13 17:00:14,663:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db1.comments_clean_anonimized
2022-01-13 17:00:14,663:INFO:========================================================================
2022-01-13 17:00:55,028:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db1 AND TABLE votes
2022-01-13 17:01:07,063:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db1.votes
2022-01-13 17:01:07,063:INFO:========================================================================
2022-01-13 17:01:07,063:INFO:started db2
2022-01-13 17:01:07,063:INFO:========================================================================
2022-01-13 17:01:07,063:INFO:GETTING CONFIGURATION FILE FOR DATABASE db2
2022-01-13 17:01:07,064:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db2
2022-01-13 17:01:07,066:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db2
2022-01-13 17:01:07,067:INFO:GETTING SCHEMA FILE FOR DATABASE db2
2022-01-13 17:01:07,072:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE db2
2022-01-13 17:01:09,268:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db2 AND TABLE deliveries
2022-01-13 17:01:17,882:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db2.deliveries
2022-01-13 17:01:17,882:INFO:========================================================================
2022-01-13 17:01:18,084:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db2 AND TABLE matches
2022-01-13 17:01:26,266:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db2.matches
2022-01-13 17:01:26,267:INFO:========================================================================
2022-01-13 17:01:26,297:INFO:started northwind2
2022-01-13 17:01:26,298:INFO:========================================================================
2022-01-13 17:01:26,298:INFO:GETTING CONFIGURATION FILE FOR DATABASE northwind2
2022-01-13 17:01:26,298:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE northwind2
2022-01-13 17:01:26,299:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE northwind2
2022-01-13 17:01:26,299:INFO:GETTING SCHEMA FILE FOR DATABASE northwind2
2022-01-13 17:01:26,302:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE northwind2
2022-01-13 17:01:26,465:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE northwind2 AND TABLE categories
2022-01-13 17:01:34,525:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID northwind2.categories
2022-01-13 17:01:34,526:INFO:========================================================================
2022-01-13 17:01:34,546:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE northwind2 AND TABLE customers
2022-01-13 17:01:45,971:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID northwind2.customers
2022-01-13 17:01:45,971:INFO:========================================================================
2022-01-13 17:01:45,975:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE northwind2 AND TABLE employeeterritories
2022-01-13 17:01:55,678:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID northwind2.employeeterritories
2022-01-13 17:01:55,679:INFO:========================================================================
2022-01-13 17:01:55,679:INFO:started pagila
2022-01-13 17:01:55,679:INFO:========================================================================
2022-01-13 17:01:55,679:INFO:GETTING CONFIGURATION FILE FOR DATABASE pagila
2022-01-13 17:01:55,679:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE pagila
2022-01-13 17:01:55,680:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE pagila
2022-01-13 17:01:55,681:INFO:GETTING SCHEMA FILE FOR DATABASE pagila
2022-01-13 17:01:55,684:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE pagila
2022-01-13 17:01:55,843:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE actor
2022-01-13 17:02:04,730:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.actor
2022-01-13 17:02:04,730:INFO:========================================================================
2022-01-13 17:02:04,736:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE category
2022-01-13 17:02:13,902:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.category
2022-01-13 17:02:13,903:INFO:========================================================================
2022-01-13 17:02:13,933:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE city
2022-01-13 17:02:23,839:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.city
2022-01-13 17:02:23,839:INFO:========================================================================
2022-01-13 17:02:23,850:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE country
2022-01-13 17:02:32,126:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.country
2022-01-13 17:02:32,126:INFO:========================================================================
2022-01-13 17:02:32,169:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE film_actor
2022-01-13 17:02:40,267:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.film_actor
2022-01-13 17:02:40,267:INFO:========================================================================
2022-01-13 17:02:40,279:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE film_category
2022-01-13 17:02:49,735:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.film_category
2022-01-13 17:02:49,736:INFO:========================================================================
2022-01-13 17:02:49,778:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE inventory
2022-01-13 17:02:58,068:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.inventory
2022-01-13 17:02:58,068:INFO:========================================================================
2022-01-13 19:54:22,629:INFO:started sample_db
2022-01-13 19:54:22,630:INFO:========================================================================
2022-01-13 19:54:22,630:INFO:GETTING CONFIGURATION FILE FOR DATABASE sample_db
2022-01-13 19:54:22,630:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE sample_db
2022-01-13 19:54:22,632:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE sample_db
2022-01-13 19:54:22,632:INFO:GETTING SCHEMA FILE FOR DATABASE sample_db
2022-01-13 19:54:22,640:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE sample_db
2022-01-13 19:54:23,692:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE sample_db AND TABLE Test
2022-01-13 19:54:35,285:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID sample_db.Test
2022-01-13 19:54:37,390:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE sample_db AND TABLE Train
2022-01-13 19:54:56,417:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID sample_db.Train
2022-01-13 19:54:56,418:INFO:========================================================================
2022-01-13 19:54:56,418:INFO:started original_db
2022-01-13 19:54:56,418:INFO:========================================================================
2022-01-13 19:54:56,418:INFO:GETTING CONFIGURATION FILE FOR DATABASE original_db
2022-01-13 19:54:56,419:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE original_db
2022-01-13 19:54:56,430:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE original_db
2022-01-13 19:54:56,430:INFO:GETTING SCHEMA FILE FOR DATABASE original_db
2022-01-13 19:54:56,449:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE original_db
2022-01-13 19:54:58,396:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE original_db AND TABLE commentInteractions
2022-01-13 19:55:09,824:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID dboriginal_db2.commentInteractions
2022-01-13 19:55:09,824:INFO:========================================================================
2022-01-13 19:55:10,043:INFO:started northwind2
2022-01-13 19:55:10,043:INFO:========================================================================
2022-01-13 19:55:10,044:INFO:GETTING CONFIGURATION FILE FOR DATABASE northwind2
2022-01-13 19:55:10,044:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE northwind2
2022-01-13 19:55:10,046:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE northwind2
2022-01-13 19:55:10,047:INFO:GETTING SCHEMA FILE FOR DATABASE northwind2
2022-01-13 19:55:10,055:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE northwind2
2022-01-13 19:55:10,212:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE northwind2 AND TABLE categories
2022-01-13 19:55:19,127:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID northwind2.categories
2022-01-13 19:55:19,127:INFO:========================================================================
2022-01-13 19:55:19,138:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE northwind2 AND TABLE customers
2022-01-13 19:55:29,368:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID northwind2.customers
2022-01-13 19:55:29,368:INFO:========================================================================
2022-01-13 19:55:29,388:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE northwind2 AND TABLE employeeterritories
2022-01-13 19:55:39,299:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID northwind2.employeeterritories
2022-01-13 19:55:39,299:INFO:========================================================================
2022-01-13 19:55:39,299:INFO:started pagila
2022-01-13 19:55:39,300:INFO:========================================================================
2022-01-13 19:55:39,300:INFO:GETTING CONFIGURATION FILE FOR DATABASE pagila
2022-01-13 19:55:39,300:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE pagila
2022-01-13 19:55:39,310:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE pagila
2022-01-13 19:55:39,311:INFO:GETTING SCHEMA FILE FOR DATABASE pagila
2022-01-13 19:55:39,336:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE pagila
2022-01-13 19:55:39,635:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE actor
2022-01-13 19:55:49,126:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.actor
2022-01-13 19:55:49,126:INFO:========================================================================
2022-01-13 19:55:49,144:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE category
2022-01-13 19:55:59,367:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.category
2022-01-13 19:55:59,368:INFO:========================================================================
2022-01-13 19:55:59,410:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE city
2022-01-13 19:56:08,608:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.city
2022-01-13 19:56:08,609:INFO:========================================================================
2022-01-13 19:56:08,642:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE country
2022-01-13 19:56:17,292:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.country
2022-01-13 19:56:17,293:INFO:========================================================================
2022-01-13 19:56:17,411:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE film_actor
2022-01-13 19:56:26,917:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.film_actor
2022-01-13 19:56:26,917:INFO:========================================================================
2022-01-13 19:56:26,953:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE film_category
2022-01-13 19:56:35,517:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.film_category
2022-01-13 19:56:35,517:INFO:========================================================================
2022-01-13 19:56:35,554:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE inventory
2022-01-13 19:56:44,936:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.inventory
2022-01-13 19:56:44,937:INFO:========================================================================
2022-01-13 19:56:45,010:INFO:started db1
2022-01-13 19:56:45,010:INFO:========================================================================
2022-01-13 19:56:45,010:INFO:GETTING CONFIGURATION FILE FOR DATABASE db1
2022-01-13 19:56:45,010:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db1
2022-01-13 19:56:45,014:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db1
2022-01-13 19:56:45,015:INFO:GETTING SCHEMA FILE FOR DATABASE db1
2022-01-13 19:56:45,021:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE db1
2022-01-13 19:56:45,528:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db1 AND TABLE comments_clean_anonimized
2022-01-13 19:56:55,998:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db1.comments_clean_anonimized
2022-01-13 19:56:55,998:INFO:========================================================================
2022-01-13 19:57:35,414:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db1 AND TABLE votes
2022-01-13 19:57:49,347:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db1.votes
2022-01-13 19:57:49,347:INFO:========================================================================
2022-01-13 19:57:49,347:INFO:started db2
2022-01-13 19:57:49,348:INFO:========================================================================
2022-01-13 19:57:49,348:INFO:GETTING CONFIGURATION FILE FOR DATABASE db2
2022-01-13 19:57:49,348:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db2
2022-01-13 19:57:49,357:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db2
2022-01-13 19:57:49,358:INFO:GETTING SCHEMA FILE FOR DATABASE db2
2022-01-13 19:57:49,382:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE db2
2022-01-13 19:57:54,090:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db2 AND TABLE deliveries
2022-01-13 19:58:05,116:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db2.deliveries
2022-01-13 19:58:05,116:INFO:========================================================================
2022-01-13 19:58:05,861:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db2 AND TABLE matches
2022-01-13 19:58:18,428:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db2.matches
2022-01-13 19:58:18,429:INFO:========================================================================
2022-01-13 19:58:22,123:INFO:started sample_db
2022-01-13 19:58:22,123:INFO:========================================================================
2022-01-13 19:58:22,123:INFO:GETTING CONFIGURATION FILE FOR DATABASE sample_db
2022-01-13 19:58:22,123:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE sample_db
2022-01-13 19:58:22,124:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE sample_db
2022-01-13 19:58:22,125:INFO:GETTING SCHEMA FILE FOR DATABASE sample_db
2022-01-13 19:58:22,128:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE sample_db
2022-01-13 19:58:22,983:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE sample_db AND TABLE Test
2022-01-13 19:58:34,196:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID sample_db.Test
2022-01-13 19:58:36,372:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE sample_db AND TABLE Train
2022-01-13 19:58:55,088:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID sample_db.Train
2022-01-13 19:58:55,088:INFO:========================================================================
2022-01-13 19:58:55,089:INFO:started original_db
2022-01-13 19:58:55,089:INFO:========================================================================
2022-01-13 19:58:55,089:INFO:GETTING CONFIGURATION FILE FOR DATABASE original_db
2022-01-13 19:58:55,090:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE original_db
2022-01-13 19:58:55,094:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE original_db
2022-01-13 19:58:55,095:INFO:GETTING SCHEMA FILE FOR DATABASE original_db
2022-01-13 19:58:55,105:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE original_db
2022-01-13 19:58:57,087:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE original_db AND TABLE commentInteractions
2022-01-13 19:59:10,513:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID dboriginal_db2.commentInteractions
2022-01-13 19:59:10,513:INFO:========================================================================
2022-01-13 19:59:10,653:INFO:started northwind2
2022-01-13 19:59:10,654:INFO:========================================================================
2022-01-13 19:59:10,654:INFO:GETTING CONFIGURATION FILE FOR DATABASE northwind2
2022-01-13 19:59:10,655:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE northwind2
2022-01-13 19:59:10,663:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE northwind2
2022-01-13 19:59:10,664:INFO:GETTING SCHEMA FILE FOR DATABASE northwind2
2022-01-13 19:59:10,680:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE northwind2
2022-01-13 19:59:10,997:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE northwind2 AND TABLE categories
2022-01-13 19:59:20,093:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID northwind2.categories
2022-01-13 19:59:20,093:INFO:========================================================================
2022-01-13 19:59:20,098:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE northwind2 AND TABLE customers
2022-01-13 19:59:28,448:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID northwind2.customers
2022-01-13 19:59:28,449:INFO:========================================================================
2022-01-13 19:59:28,464:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE northwind2 AND TABLE employeeterritories
2022-01-13 19:59:37,944:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID northwind2.employeeterritories
2022-01-13 19:59:37,945:INFO:========================================================================
2022-01-13 19:59:37,945:INFO:started pagila
2022-01-13 19:59:37,945:INFO:========================================================================
2022-01-13 19:59:37,945:INFO:GETTING CONFIGURATION FILE FOR DATABASE pagila
2022-01-13 19:59:37,945:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE pagila
2022-01-13 19:59:37,950:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE pagila
2022-01-13 19:59:37,950:INFO:GETTING SCHEMA FILE FOR DATABASE pagila
2022-01-13 19:59:37,960:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE pagila
2022-01-13 19:59:38,284:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE actor
2022-01-13 19:59:46,258:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.actor
2022-01-13 19:59:46,259:INFO:========================================================================
2022-01-13 19:59:46,266:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE category
2022-01-13 19:59:56,114:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.category
2022-01-13 19:59:56,114:INFO:========================================================================
2022-01-13 19:59:56,128:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE city
2022-01-13 20:00:06,926:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.city
2022-01-13 20:00:06,927:INFO:========================================================================
2022-01-13 20:00:06,950:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE country
2022-01-13 20:00:17,687:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.country
2022-01-13 20:00:17,687:INFO:========================================================================
2022-01-13 20:00:17,716:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE film_actor
2022-01-13 20:00:26,465:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.film_actor
2022-01-13 20:00:26,465:INFO:========================================================================
2022-01-13 20:00:26,474:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE film_category
2022-01-13 20:00:35,650:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.film_category
2022-01-13 20:00:35,650:INFO:========================================================================
2022-01-13 20:00:35,677:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE inventory
2022-01-13 20:00:44,092:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.inventory
2022-01-13 20:00:44,092:INFO:========================================================================
2022-01-13 20:00:44,181:INFO:started db1
2022-01-13 20:00:44,182:INFO:========================================================================
2022-01-13 20:00:44,182:INFO:GETTING CONFIGURATION FILE FOR DATABASE db1
2022-01-13 20:00:44,182:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db1
2022-01-13 20:00:44,183:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db1
2022-01-13 20:00:44,184:INFO:GETTING SCHEMA FILE FOR DATABASE db1
2022-01-13 20:00:44,187:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE db1
2022-01-13 20:00:44,728:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db1 AND TABLE comments_clean_anonimized
2022-01-13 20:00:53,672:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db1.comments_clean_anonimized
2022-01-13 20:00:53,672:INFO:========================================================================
2022-01-13 20:01:41,373:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db1 AND TABLE votes
2022-01-13 20:01:51,404:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db1.votes
2022-01-13 20:01:51,404:INFO:========================================================================
2022-01-13 20:01:51,404:INFO:started db2
2022-01-13 20:01:51,404:INFO:========================================================================
2022-01-13 20:01:51,405:INFO:GETTING CONFIGURATION FILE FOR DATABASE db2
2022-01-13 20:01:51,405:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db2
2022-01-13 20:01:51,408:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db2
2022-01-13 20:01:51,408:INFO:GETTING SCHEMA FILE FOR DATABASE db2
2022-01-13 20:01:51,415:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE db2
2022-01-13 20:01:55,049:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db2 AND TABLE deliveries
2022-01-13 20:02:03,420:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db2.deliveries
2022-01-13 20:02:03,421:INFO:========================================================================
2022-01-13 20:02:03,823:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db2 AND TABLE matches
2022-01-13 20:02:13,535:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db2.matches
2022-01-13 20:02:13,536:INFO:========================================================================
2022-01-13 20:28:24,581:INFO:started northwind2
2022-01-13 20:28:24,581:INFO:========================================================================
2022-01-13 20:28:24,582:INFO:GETTING CONFIGURATION FILE FOR DATABASE northwind2
2022-01-13 20:28:24,582:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE northwind2
2022-01-13 20:28:24,584:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE northwind2
2022-01-13 20:28:24,584:INFO:GETTING SCHEMA FILE FOR DATABASE northwind2
2022-01-13 20:28:24,589:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE northwind2
2022-01-13 20:28:25,127:INFO:started sample_db
2022-01-13 20:28:25,127:INFO:========================================================================
2022-01-13 20:28:25,127:INFO:GETTING CONFIGURATION FILE FOR DATABASE sample_db
2022-01-13 20:28:25,127:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE sample_db
2022-01-13 20:28:25,129:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE sample_db
2022-01-13 20:28:25,129:INFO:GETTING SCHEMA FILE FOR DATABASE sample_db
2022-01-13 20:28:25,133:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE sample_db
2022-01-13 20:30:58,839:INFO:started northwind2
2022-01-13 20:30:58,839:INFO:========================================================================
2022-01-13 20:30:58,839:INFO:GETTING CONFIGURATION FILE FOR DATABASE northwind2
2022-01-13 20:30:58,839:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE northwind2
2022-01-13 20:30:58,843:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE northwind2
2022-01-13 20:30:58,843:INFO:GETTING SCHEMA FILE FOR DATABASE northwind2
2022-01-13 20:30:58,851:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE northwind2
# This is a sample Python script.
# Press Shift+F10 to execute it or replace it with your code.
# Press Double Shift to search everywhere for classes, files, tool windows, actions, and settings.
def print_hi(name):
# Use a breakpoint in the code line below to debug your script.
print(f'Hi, {name}') # Press Ctrl+F8 to toggle the breakpoint.
# Press the green button in the gutter to run the script.
if __name__ == '__main__':
print_hi('PyCharm')
# See PyCharm help at https://www.jetbrains.com/help/pycharm/
import pandas_gbq as pd_gbq
import pandas as pd
import pymysql
class MSQLPOOl:
def get_connection(self,db_host,db_user,db_pass):
try:
conn = pymysql.connect(
host=db_host,
user=db_user,
password=db_pass,
)
except Exception as e:
print(e)
else:
return conn
def read_and_prepare_data(self,db_name,table,conn):
try:
query = "SELECT * FROM {0}.{1} ;".format(db_name,table)
df = pd.read_sql(query,conn)
except Exception as e:
print(e)
else:
return df
def load_into_bq(self,df,table_id,project_id,schema_json):
try:
pd_gbq.to_gbq(df,table_id,
project_id=project_id,
table_schema=schema_json,
if_exists='replace')
except Exception as e:
print(e)
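# Illustrative usage sketch (an assumption, not part of the original module): shows the
# intended call order for MSQLPOOl - connect, read one table into a DataFrame, then load
# it into BigQuery with an explicit schema. Host, credentials, table names, project and
# dataset IDs below are placeholders, not values from the project configuration.
if __name__ == '__main__':
    pool = MSQLPOOl()
    # Placeholder connection values for illustration only.
    conn = pool.get_connection("localhost", "example_user", "example_password")
    df = pool.read_and_prepare_data("example_db", "example_table", conn)
    # pandas-gbq expects table_schema as a list of {name, type, mode} dicts, one per column.
    schema_json = [
        {"name": "id", "type": "INTEGER", "mode": "NULLABLE"},
        {"name": "label", "type": "STRING", "mode": "NULLABLE"},
    ]
    pool.load_into_bq(df, "example_dataset.example_table", "example-project-id", schema_json)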
import pandas_gbq as pd_gbq
import pandas as pd
import psycopg2
class POSTGRESPOOL:
def get_connection(self,db_host,db_user,db_pass,db_name):
try:
conn = psycopg2.connect(
host=db_host,
user=db_user,
password=db_pass,
database= db_name
)
except Exception as e:
print(e)
else:
return conn
def read_and_prepare_data(self,table,conn):
try:
query = "SELECT * FROM {0};".format(table)
df = pd.read_sql(query,conn)
except Exception as e:
print(e)
else:
return df
def load_into_bq(self,df,table_id,project_id,schema_json):
try:
pd_gbq.to_gbq(df,table_id,
project_id=project_id,
table_schema=schema_json,
if_exists='replace')
except Exception as e:
print(e)
import pandas_gbq as pd_gbq
import pandas as pd
import pyodbc
class SQLSERVERPOOL:
def get_connection(self,db_name):
try:
pyodbc.drivers()
            conn = pyodbc.connect(r'DRIVER=SQL Server;SERVER=HYD-LAP-0370\SQLEXPRESS;DATABASE={0};Trusted_Connection=yes;'.format(db_name))
except Exception as e:
print(e)
else:
return conn
def read_and_prepare_data(self,db_name,table,conn):
try:
query = "SELECT * FROM {0}.dbo.{1};".format(db_name,table)
df = pd.read_sql(query,conn)
except Exception as e:
print(e)
else:
return df
def load_into_bq(self,df,table_id,project_id,schema_json):
try:
pd_gbq.to_gbq(df,table_id,
project_id=project_id,
table_schema=schema_json,
if_exists='replace')
except Exception as e:
print(e)
from wtforms import Form, SelectField, SubmitField

class RegistrationForm(Form):
    # The field name must match form.DB_NAMES referenced in the db.html template.
    list_of_databases = ["MySql", "Postgres", "SQLServer"]
    DB_NAMES = SelectField(u'Database server', choices=list_of_databases)
    submit = SubmitField('Submit')
\ No newline at end of file
pandas - 1.3.5
numpy - 1.22.0
six - 1.16.0
pytz - 2021.3
python-dateutil - 2.8.2
psycopg2 - 2.9.3
pyodbc - 4.0.32
mysql-connector-python - 8.0.27
mysql-connector-python~=8.0.27
pandas~=1.3.5
psycopg2~=2.9.3
pyodbc~=4.0.32
\ No newline at end of file
from flask import Flask, flash, redirect, render_template, request, url_for
from onpremisesdatabases import sqlserver1, postgres1, mysql1
from registrationform import RegistrationForm

app = Flask(__name__)
app.secret_key = "MY_SECRET_KEY"

@app.route('/', methods=['GET', 'POST'])
def register():
    form = RegistrationForm(request.form)
    if request.method == 'POST' and form.validate():
        # The selected value comes from the DB_NAMES field and matches its choices.
        selected = form.DB_NAMES.data
        if selected == "MySql":
            return mysql1       # placeholder: hand off to the MySQL migration module
        elif selected == "Postgres":
            return postgres1    # placeholder: hand off to the Postgres migration module
        elif selected == "SQLServer":
            return sqlserver1   # placeholder: hand off to the SQL Server migration module
        flash('Thanks for registering')
        return redirect(url_for('thankyou'))
    return render_template('db.html', form=form)

if __name__ == "__main__":
    app.run(debug=True, port=1234)
\ No newline at end of file
database,table,name,type,mode
db1_config,comments_clean_anonimized,employee,INTEGER,NULLABLE
db1_config,comments_clean_anonimized,companyAlias,STRING,NULLABLE
db1_config,comments_clean_anonimized,commentId,STRING,NULLABLE
db1_config,comments_clean_anonimized,txt,STRING,NULLABLE
db1_config,comments_clean_anonimized,likes,INTEGER,NULLABLE
db1_config,comments_clean_anonimized,dislikes,INTEGER,NULLABLE
db1_config,comments_clean_anonimized,commentDate,STRING,NULLABLE
db1_config,votes,employee,INTEGER,NULLABLE
db1_config,votes,companyAlias,STRING,NULLABLE
db1_config,votes,voteDate,STRING,NULLABLE
db1_config,votes,vote,INTEGER,NULLABLE
\ No newline at end of file
database,table,name,type,mode
db2_config,deliveries,match_id,INTEGER,NULLABLE
db2_config,deliveries,inning,INTEGER,NULLABLE
db2_config,deliveries,batting_team,STRING,NULLABLE
db2_config,deliveries,bowling_team,STRING,NULLABLE
db2_config,deliveries,over,INTEGER,NULLABLE
db2_config,deliveries,ball,INTEGER,NULLABLE
db2_config,deliveries,batsman,STRING,NULLABLE
db2_config,deliveries,non_striker,STRING,NULLABLE
db2_config,deliveries,bowler,STRING,NULLABLE
db2_config,deliveries,is_super_over,INTEGER,NULLABLE
db2_config,deliveries,wide_runs,INTEGER,NULLABLE
db2_config,deliveries,bye_runs,INTEGER,NULLABLE
db2_config,deliveries,legbye_runs,INTEGER,NULLABLE
db2_config,deliveries,noball_runs,INTEGER,NULLABLE
db2_config,deliveries,penalty_runs,INTEGER,NULLABLE
db2_config,deliveries,batsman_runs,INTEGER,NULLABLE
db2_config,deliveries,extra_runs,INTEGER,NULLABLE
db2_config,deliveries,total_runs,INTEGER,NULLABLE
db2_config,deliveries,player_dismissed,STRING,NULLABLE
db2_config,deliveries,dismissal_kind,STRING,NULLABLE
db2_config,deliveries,fielder,STRING,NULLABLE
db2_config,matches,id,INTEGER,NULLABLE
db2_config,matches,season,INTEGER,NULLABLE
db2_config,matches,city,STRING,NULLABLE
db2_config,matches,date,STRING,NULLABLE
db2_config,matches,team1,STRING,NULLABLE
db2_config,matches,team2,STRING,NULLABLE
db2_config,matches,toss_winner,STRING,NULLABLE
db2_config,matches,toss_decision,STRING,NULLABLE
db2_config,matches,result,STRING,NULLABLE
db2_config,matches,dl_applied,INTEGER,NULLABLE
db2_config,matches,winner,STRING,NULLABLE
db2_config,matches,win_by_runs,INTEGER,NULLABLE
db2_config,matches,win_by_wickets,INTEGER,NULLABLE
db2_config,matches,player_of_match,STRING,NULLABLE
db2_config,matches,venue,STRING,NULLABLE
db2_config,matches,umpire1,STRING,NULLABLE
db2_config,matches,umpire2,STRING,NULLABLE
db2_config,matches,umpire3,STRING,NULLABLE
\ No newline at end of file
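# Illustrative sketch (not a project file): how a schema CSV like the one above can be
# turned into the list-of-dicts table_schema that pandas-gbq expects. The relative file
# path and the chosen table are placeholders for illustration.
import pandas as pd

schema_df = pd.read_csv("schemafiles/db2_config.csv")       # columns: database,table,name,type,mode
deliveries = schema_df[schema_df["table"] == "deliveries"]  # keep the rows for one table
table_schema = deliveries[["name", "type", "mode"]].to_dict("records")
# table_schema is now e.g. [{"name": "match_id", "type": "INTEGER", "mode": "NULLABLE"}, ...]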
{
"type": "service_account",
"project_id": "flaskdatabasemigratrions",
"private_key_id": "2624cb45258191a02926923bd3b668049c5ef628",
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCbNTtvy12s6r2b\n5aFuA3Xsnol2n04gf2vL3BglZARtLFXQkQajIbl0E1MNj6i5/MUYdd6FpOIPWz7u\n8T+0VBvu21rOTdaLLxq8a1rGuOvqCmjh6BJBE0F/q/9S7I5uENQsm9DvzLGIlnnl\nfh8yasgqqKy1oSG1jZ0k7U0kC9ibj/BtBJmba7Q6KTPOUzsgya8GFJfv+SudiM0y\n6dloY9kXPORvWI58zX9P3zv2JGs9B0Ihub1SavrX3dcop6+xmFcBN2XwNnLaouPh\n2Fl9XcPmvHmwSgZ8RuxGFlivn7PcGT3WcneWujO/iDqmWAEeY0jSnFo0rGZIY0Vs\nlhxbMd4ZAgMBAAECggEACOfii7UnIGNt0tK30xi2vQeRyrE0ae/1zwJHjmmxSVRo\nbinMMDMgsZTkSJxsKZPj+VXWGWQPJqNtNoe4SJAk9C05Bip4O49N4O7mDA4TV2AW\niZPn7Dz/HqTPZ0I0oAMuMYsVT5U2wKglwXHdt2ZiKvR3yIpA5WqFUFeffAiD+tUO\nGxVMdtURTqa3zGludsMXX7Af2GRGR5NpeH/Sxj7/1ENb0Q2wDai7dj3h/8n0B8Yc\nsAP6/JmfnEl3o48jkHR8OInLFl+TR8JxEXVN3mWiQHH/7s/drvMGMFegfjs3Rhrt\nbUDfxDUq9w5vf4ZJ6G+Hwk+b1PMsAQcTBDnABrxYaQKBgQDYBFhTiqGNDi68kZPc\nprAQ126j3SZenBdhjX6cEyUPryw8dJpWsUwuEJUBMjpVp46Cxyr4KSn1wUN8vLz2\nnBtSRp7Xij16f2WjNgkqB6f9lQeTUm0e3yuekpJReRp5dFDMykP/6kn2gBb3w9Dp\niUQGWtz9cIPDAy8YQJmxIFCHTQKBgQC374nBgzks8X/UhtUpbrpEpKiBBh59XelM\nnAnWkUULbCMIAbh1T+TErASa+qaZZc8JV/5QB+OjpL2Ntw3OyzbJOpPz2rY+wh8B\nKbmD041/YOeOhv6aSrhym4gOTXKkncDqwVhvXNysMk996NbZChS6h9tDikKCgx0Y\nfYXhnX9D/QKBgG709GWc/NRWZ51GbZvWJvPPpK4Yxz+rj7r1EgabMts2kFNRVYGY\nOlgi/laJ9TrQATydjy2+/yWTqtWkoK9Pih0d1zNBd8i3LX7DH7vuCNUROr3qhdBo\ns6AVC4E74mRmYaZ/cqQ+NNWf/6wucd2zOzTEcWTg6V0/Am7Dp+46egK1AoGBAKG3\nRRKSYO6W1aDxevCWncZETrJBwwTOUJQ+jJfN2TqVtKcRfDAIMkaIRvRLc7ZiwIii\niMGsbgQuj0nNq6IpIN4DVpmLrbe3zLKlEKpKkZlwhaXWIntdPpXlENYiWa1J1ZeQ\nrkQbDRmIkkNNN7hTGK53S9wx7w/4XthtZAxdAMuxAoGBAKUckLI04eS+nfq+eptR\nFltG9R/T1u+mSKyIlNYQzScLQdcd8rNRexJYwFvprbXAT8DNyDwyarIPPx5P3e2/\nlgXfRifRu1Lo6MQzKxw+MR5hZbrJchxSHPtA6ayLykzYCn3D+ollm2lwdjr8Bpu1\n9stynj25Ca6Es3i3MYIj4vgv\n-----END PRIVATE KEY-----\n",
"client_email": "prudhvee2@flaskdatabasemigratrions.iam.gserviceaccount.com",
"client_id": "101647014497676275501",
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://oauth2.googleapis.com/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/prudhvee2%40flaskdatabasemigratrions.iam.gserviceaccount.com"
}
database,table,name,type,mode
hr_analytics,aug_test,enrollee_id,INTEGER,NULLABLE
hr_analytics,aug_test,city,STRING,NULLABLE
hr_analytics,aug_test,ecity_development_index,FLOAT,NULLABLE
hr_analytics,aug_test,gender,STRING,NULLABLE
hr_analytics,aug_test,relevent_experience,STRING,NULLABLE
hr_analytics,aug_test,enrolled_university,STRING,NULLABLE
hr_analytics,aug_test,education_level,STRING,NULLABLE
hr_analytics,aug_test,major_discipline,STRING,NULLABLE
hr_analytics,aug_test,experience,STRING,NULLABLE
hr_analytics,aug_test,company_size,STRING,NULLABLE
hr_analytics,aug_test,company_type,STRING,NULLABLE
hr_analytics,aug_test,last_new_job,STRING,NULLABLE
hr_analytics,aug_test,training_hours,INTEGER,NULLABLE
hr_analytics,aug_train,enrollee_id,INTEGER,NULLABLE
hr_analytics,aug_train,city,STRING,NULLABLE
hr_analytics,aug_train,ecity_development_index,FLOAT,NULLABLE
hr_analytics,aug_train,gender,STRING,NULLABLE
hr_analytics,aug_train,relevent_experience,STRING,NULLABLE
hr_analytics,aug_train,enrolled_university,STRING,NULLABLE
hr_analytics,aug_train,education_level,STRING,NULLABLE
hr_analytics,aug_train,major_discipline,STRING,NULLABLE
hr_analytics,aug_train,experience,STRING,NULLABLE
hr_analytics,aug_train,company_size,STRING,NULLABLE
hr_analytics,aug_train,company_type,STRING,NULLABLE
hr_analytics,aug_train,last_new_job,STRING,NULLABLE
hr_analytics,aug_train,training_hours,INTEGER,NULLABLE
hr_analytics,aug_train,target,FLOAT,NULLABLE
hr_analytics,sample_submission,enrollee_id,INTEGER,NULLABLE
hr_analytics,sample_submission,target,FLOAT,NULLABLE
database,table,name,type,mode
northwind2_config,categories,categoryid,INTEGER,NULLABLE
northwind2_config,categories,categoryname,STRING,NULLABLE
northwind2_config,categories,description,STRING,NULLABLE
northwind2_config,categories,picture,STRING,NULLABLE
northwind2_config,customers,customerid,STRING,NULLABLE
northwind2_config,customers,companyname,STRING,NULLABLE
northwind2_config,customers,contactname,STRING,NULLABLE
northwind2_config,customers,contacttitle,STRING,NULLABLE
northwind2_config,customers,address,STRING,NULLABLE
northwind2_config,customers,city,STRING,NULLABLE
northwind2_config,customers,region,STRING,NULLABLE
northwind2_config,customers,postalcode,STRING,NULLABLE
northwind2_config,customers,country,STRING,NULLABLE
northwind2_config,customers,phone,STRING,NULLABLE
northwind2_config,customers,fax,STRING,NULLABLE
northwind2_config,employeeterritories,employeeid,INTEGER,NULLABLE
northwind2_config,employeeterritories,territoryid,STRING,NULLABLE
database,table,name,type,mode
original_db_config,commentInteractions,employee,STRING,NULLABLE
original_db_config,commentInteractions,companyAlias,STRING,NULLABLE
original_db_config,commentInteractions,liked,STRING,NULLABLE
original_db_config,commentInteractions,disliked,STRING,NULLABLE
original_db_config,commentInteractions,commentId,STRING,NULLABLE
database,table,name,type,mode
pagila_config,actor,actor_id,INTEGER,NULLABLE
pagila_config,actor,first_name,STRING,NULLABLE
pagila_config,actor,last_name,STRING,NULLABLE
pagila_config,actor,last_update,DATETIME,NULLABLE
pagila_config,category,category_id,INTEGER,NULLABLE
pagila_config,category,name,STRING,NULLABLE
pagila_config,category,last_update,DATETIME,NULLABLE
pagila_config,city,city_id,INTEGER,NULLABLE
pagila_config,city,city,STRING,NULLABLE
pagila_config,city,country_id,INTEGER,NULLABLE
pagila_config,city,last_update,DATETIME,NULLABLE
pagila_config,country,country_id,INTEGER,NULLABLE
pagila_config,country,country,STRING,NULLABLE
pagila_config,country,last_update,DATETIME,NULLABLE
pagila_config,film_actor,actor_id,INTEGER,NULLABLE
pagila_config,film_actor,film_id,INTEGER,NULLABLE
pagila_config,film_actor,last_update,DATETIME,NULLABLE
pagila_config,film_category,film_id,INTEGER,NULLABLE
pagila_config,film_category,category_id,INTEGER,NULLABLE
pagila_config,film_category,last_update,DATETIME,NULLABLE
pagila_config,inventory,language_id,INTEGER,NULLABLE
pagila_config,inventory,name,STRING,NULLABLE
pagila_config,inventory,last_update,DATETIME,NULLABLE
\ No newline at end of file
database,table,name,type,mode
sample_db_config,Test,InvoiceNo,STRING,NULLABLE
sample_db_config,Test,StockCode,STRING,NULLABLE
sample_db_config,Test,Description,STRING,NULLABLE
sample_db_config,Test,Quantity,STRING,NULLABLE
sample_db_config,Test,InvoiceDate,STRING,NULLABLE
sample_db_config,Test,CustomerID,STRING,NULLABLE
sample_db_config,Test,Country,STRING,NULLABLE
sample_db_config,Train,InvoiceNo,STRING,NULLABLE
sample_db_config,Train,StockCode,STRING,NULLABLE
sample_db_config,Train,Description,STRING,NULLABLE
sample_db_config,Train,Quantity,STRING,NULLABLE
sample_db_config,Train,InvoiceDate,STRING,NULLABLE
sample_db_config,Train,UnitPrice,STRING,NULLABLE
sample_db_config,Train,CustomerID,STRING,NULLABLE
sample_db_config,Train,Country,STRING,NULLABLE
from flask import Flask, render_template

app = Flask(__name__)

@app.route('/dbmigration')
def dbmigration():
    return render_template('index.html')

if __name__ == '__main__':
    app.run(host='localhost', port=5000)
/* Navbar container */
.navbar {
overflow: hidden;
background-color: #333;
font-family: Arial;
}
/* Links inside the navbar */
.navbar a {
float: left;
font-size: 16px;
color: white;
text-align: center;
padding: 14px 16px;
text-decoration: none;
}
/* The dropdown container */
.dropdown {
float: left;
overflow: hidden;
}
/* Dropdown button */
.dropdown .dropbtn {
font-size: 16px;
border: none;
outline: none;
color: white;
padding: 14px 16px;
background-color: inherit;
font-family: inherit; /* Important for vertical align on mobile phones */
margin: 0; /* Important for vertical align on mobile phones */
}
/* Add a red background color to navbar links on hover */
.navbar a:hover, .dropdown:hover .dropbtn {
background-color: red;
}
/* Dropdown content (hidden by default) */
.dropdown-content {
display: none;
position: absolute;
background-color: #f9f9f9;
min-width: 160px;
box-shadow: 0px 8px 16px 0px rgba(0,0,0,0.2);
z-index: 1;
}
/* Links inside the dropdown */
.dropdown-content a {
float: none;
color: black;
padding: 12px 16px;
text-decoration: none;
display: block;
text-align: left;
}
/* Add a grey background color to dropdown links on hover */
.dropdown-content a:hover {
background-color: #ddd;
}
/* Show the dropdown menu on hover */
.dropdown:hover .dropdown-content {
display: block;
}
\ No newline at end of file
<form action="" method="post">
<h1> Flask database migrations </h1>
<p>
{{ form.DB_NAMES }}<br>
{% for db in form.DB_NAMES.errors %}
<span style="color: red;">[{{ db }}]</span>
{% endfor %}
</p>
<p>{{ form.submit() }}</p>
</form>
\ No newline at end of file
<form class="form-inline" method="POST" action="{{ url_for('test') }}">
<div class="form-group">
<div class="input-group">
<span class="input-group-addon">Select SERVER</span>
<select name="comp_select" class="selectpicker form-control">
{% for o in data %}
<option value="{{ o.name }}">{{ o.name }}</option>
{% endfor %}
</select>
</div>
<button type="submit" class="btn btn-default">Go</button>
</div>
</form>
<!--{% block styles %}-->
<!-- <style>-->
<!-- body { background: #fcfce6; }-->
<!-- </style>-->
<!--{% endblock %}-->
<!--&lt;!&ndash;{% block title %}&ndash;&gt;-->
<!--&lt;!&ndash;dbmigration&ndash;&gt;-->
<!--&lt;!&ndash;{% endblock %}&ndash;&gt;-->
<!--{% block content %}-->
<!--<h2 style="text-align:center;">FLASK DATABASE MIGRATIONS</h2>-->
<!--&lt;!&ndash; <div style="position: relative;display: inline-block;">&ndash;&gt;-->
<!--&lt;!&ndash; <button onclick="myFunction()";style="background-color: #04AA6D;color: white; padding: 16px;font-size: 16px;">SERVERS&ndash;&gt;-->
<!--&lt;!&ndash; <i class="fa fa-caret-down"></i>&ndash;&gt;-->
<!--&lt;!&ndash; </button>&ndash;&gt;-->
<!--&lt;!&ndash; <div style="display: none;position: absolute; background-color: #f1f1f1; min-width: 160px; box-shadow: 0px 8px 16px 0px rgba(0,0,0,0.2);z-index: 1;">&ndash;&gt;-->
<!--&lt;!&ndash; <a style="color: black,padding: 12px 16px;text-decoration: none;display: block";"href="#">MY SQL</a>&ndash;&gt;-->
<!--&lt;!&ndash; <a style="color: black,padding: 12px 16px;text-decoration: none;display: block";"href="#">POSTGRES</a>&ndash;&gt;-->
<!--&lt;!&ndash; <a style="color: black,padding: 12px 16px;text-decoration: none;display: block";"href="#">SQL SERVER</a>&ndash;&gt;-->
<!--&lt;!&ndash; </div>&ndash;&gt;-->
<!--&lt;!&ndash; </div>&ndash;&gt;-->
<!--<select name = "SERVERS">-->
<!-- <button class="btn btn-secondary dropdown-toggle" type="button" data-bs-toggle="dropdown">SERVERS</button>-->
<!--<option value = "my" >MY SQL</option>-->
<!--<option value = "post">POSTGRES</option>-->
<!--<option value = "sql" >SQL SERVER</option>-->
<!--</select>-->
<!--{% endblock %}-->
\ No newline at end of file
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Thankyou</title>
</head>
<body>
<table>
{% for item in items %}
<TR>
<TD class="c1">{{item}}</TD>
</TR>
{% endfor %}
</table>
</body>
</html>
\ No newline at end of file
2022-01-12 14:03:14,949:INFO:started northwind2
2022-01-12 14:03:14,949:INFO:========================================================================
2022-01-12 14:03:14,949:INFO:GETTING CONFIGURATION FILE FOR DATABASE northwind2
2022-01-12 14:03:14,949:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE northwind2
2022-01-12 14:03:14,953:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE northwind2
2022-01-12 14:03:14,953:INFO:GETTING SCHEMA FILE FOR DATABASE northwind2
2022-01-12 14:03:14,962:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE northwind2
2022-01-12 14:03:15,124:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE northwind2 AND TABLE categories
2022-01-12 14:03:25,628:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID northwind2.categories
2022-01-12 14:03:25,629:INFO:========================================================================
2022-01-12 14:03:25,659:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE northwind2 AND TABLE customers
2022-01-12 14:03:34,024:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID northwind2.customers
2022-01-12 14:03:34,025:INFO:========================================================================
2022-01-12 14:03:34,045:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE northwind2 AND TABLE employeeterritories
2022-01-12 14:03:45,344:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID northwind2.employeeterritories
2022-01-12 14:03:45,345:INFO:========================================================================
2022-01-12 14:03:45,345:INFO:started pagila
2022-01-12 14:03:45,345:INFO:========================================================================
2022-01-12 14:03:45,346:INFO:GETTING CONFIGURATION FILE FOR DATABASE pagila
2022-01-12 14:03:45,346:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE pagila
2022-01-12 14:03:45,351:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE pagila
2022-01-12 14:03:45,352:INFO:GETTING SCHEMA FILE FOR DATABASE pagila
2022-01-12 14:03:45,368:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE pagila
2022-01-12 14:03:45,652:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE actor
2022-01-12 14:03:56,560:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.actor
2022-01-12 14:03:56,560:INFO:========================================================================
2022-01-12 14:03:56,572:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE category
2022-01-12 14:04:06,777:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.category
2022-01-12 14:04:06,777:INFO:========================================================================
2022-01-12 14:04:06,789:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE city
2022-01-12 14:04:18,465:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.city
2022-01-12 14:04:18,465:INFO:========================================================================
2022-01-12 14:04:18,479:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE country
2022-01-12 14:04:27,211:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.country
2022-01-12 14:04:27,211:INFO:========================================================================
2022-01-12 14:04:27,322:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE film_actor
2022-01-12 14:04:36,077:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.film_actor
2022-01-12 14:04:36,077:INFO:========================================================================
2022-01-12 14:04:36,110:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE film_category
2022-01-12 14:04:45,495:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.film_category
2022-01-12 14:04:45,496:INFO:========================================================================
2022-01-12 14:04:45,528:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE pagila AND TABLE inventory
2022-01-12 14:05:01,519:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID pagila.inventory
2022-01-12 14:05:01,519:INFO:========================================================================
2022-01-12 14:09:17,085:INFO:started sample_db
2022-01-12 14:09:17,085:INFO:========================================================================
2022-01-12 14:09:17,085:INFO:GETTING CONFIGURATION FILE FOR DATABASE sample_db
2022-01-12 14:09:17,085:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE sample_db
2022-01-12 14:09:17,088:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE sample_db
2022-01-12 14:09:17,089:INFO:GETTING SCHEMA FILE FOR DATABASE sample_db
2022-01-12 14:09:17,095:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE sample_db
2022-01-12 14:09:18,137:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE sample_db AND TABLE Test
2022-01-12 14:09:30,374:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID sample_db.Test
2022-01-12 14:09:32,562:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE sample_db AND TABLE Train
2022-01-12 14:09:51,779:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID sample_db.Train
2022-01-12 14:09:51,779:INFO:========================================================================
2022-01-12 14:09:51,779:INFO:started original_db
2022-01-12 14:09:51,780:INFO:========================================================================
2022-01-12 14:09:51,780:INFO:GETTING CONFIGURATION FILE FOR DATABASE original_db
2022-01-12 14:09:51,781:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE original_db
2022-01-12 14:09:51,790:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE original_db
2022-01-12 14:09:51,790:INFO:GETTING SCHEMA FILE FOR DATABASE original_db
2022-01-12 14:09:51,809:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE original_db
2022-01-12 14:09:53,802:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE original_db AND TABLE commentInteractions
2022-01-12 14:10:05,802:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID dboriginal_db2.commentInteractions
2022-01-12 14:10:05,802:INFO:========================================================================
2022-01-12 14:19:57,429:INFO:started db1
2022-01-12 14:19:57,430:INFO:========================================================================
2022-01-12 14:19:57,430:INFO:GETTING CONFIGURATION FILE FOR DATABASE db1
2022-01-12 14:19:57,430:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db1
2022-01-12 14:19:57,433:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db1
2022-01-12 14:19:57,433:INFO:GETTING SCHEMA FILE FOR DATABASE db1
2022-01-12 14:19:57,441:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE db1
2022-01-12 14:19:59,083:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db1 AND TABLE comments_clean_anonimized
2022-01-12 14:20:10,472:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db1.comments_clean_anonimized
2022-01-12 14:20:10,473:INFO:========================================================================
2022-01-12 14:20:53,157:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db1 AND TABLE votes
2022-01-12 14:21:04,127:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db1.votes
2022-01-12 14:21:04,128:INFO:========================================================================
2022-01-12 14:21:04,128:INFO:started db2
2022-01-12 14:21:04,128:INFO:========================================================================
2022-01-12 14:21:04,128:INFO:GETTING CONFIGURATION FILE FOR DATABASE db2
2022-01-12 14:21:04,129:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db2
2022-01-12 14:21:04,139:INFO:SUCCESSFULLY LOADED CONFIGURATION FILE FOR DATABASE db2
2022-01-12 14:21:04,139:INFO:GETTING SCHEMA FILE FOR DATABASE db2
2022-01-12 14:21:04,163:INFO:SUCCESSFULLY LOADED SCHEMA FILE FOR DATABASE db2
2022-01-12 14:21:11,928:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db2 AND TABLE deliveries
2022-01-12 14:21:20,612:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db2.deliveries
2022-01-12 14:21:20,612:INFO:========================================================================
2022-01-12 14:21:21,119:INFO:SUCCESSFULLY GOT TABLE DATA FROM DATABASE db2 AND TABLE matches
2022-01-12 14:21:33,315:INFO:SUCCESSFULLY LOADED TABLE DATA TO BIGQUERY TABLE_ID db2.matches
2022-01-12 14:21:33,316:INFO:========================================================================
# This is a sample Python script.
# Press Shift+F10 to execute it or replace it with your code.
# Press Double Shift to search everywhere for classes, files, tool windows, actions, and settings.
def print_hi(name):
# Use a breakpoint in the code line below to debug your script.
print(f'Hi, {name}') # Press Ctrl+F8 to toggle the breakpoint.
# Press the green button in the gutter to run the script.
if __name__ == '__main__':
print_hi('PyCharm')
# See PyCharm help at https://www.jetbrains.com/help/pycharm/
import configparser
from flask import Flask, flash, redirect, render_template, \
request, url_for
import pickle
from onpremisesdatabases.msqlpool import *
app = Flask(__name__)
@app.route('/')
def index():
return render_template(
'index.html',
data=[{'name':'MY SQL'}, {'name':'POSTGRES'}, {'name':'SQL SERVER'}])
@app.route("/test" , methods=['GET', 'POST'])
def test():
ms=NQ()
s=ms.MYSQLPOOl()
# config = configparser.ConfigParser()
# select = request.form.get('comp_select')
# list1=[]
return(redirect(url_for("thankyou",items=ms.alist())))
@app.route('/thankyou', methods=['GET', 'POST'])
def thankyou():
    # Read back the per-table migration results pickled by the pool runner.
    with open("my_data", 'rb') as infile:
        items = pickle.load(infile)
    return render_template("thankyou.html", items=items)

if __name__ == '__main__':
    app.run(debug=True)
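# Illustrative sketch (an assumption, not part of the app): the "my_data" pickle written
# by the pool runner holds one entry per migrated table, shaped as
# [selected_server, database_name, table_name, loaded_ok]. The values below are
# placeholders that only show the structure the /thankyou view renders.
def _example_my_data_roundtrip():
    example_rows = [
        ["MY SQL", "db1", "comments_clean_anonimized", True],
        ["MY SQL", "db1", "votes", True],
    ]
    with open("my_data", "wb") as outfile:
        pickle.dump(example_rows, outfile)
    with open("my_data", "rb") as infile:
        return pickle.load(infile)  # what thankyou() passes to thankyou.html as items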
import configparser
import os
import pickle
import pandas as pd
from flask import request, config
from google.cloud import bigquery
from google.cloud.exceptions import NotFound
from onpremisesdatabases.mysql1 import MSQLPOOl
from onpremisesdatabases.postgres1 import POSTGRESPOOL
from onpremisesdatabases.sqlserver1 import SQLSERVERPOOL
class NQ:
def MYSQLPOOl(self):
config = configparser.ConfigParser()
select = request.form.get('comp_select')
global list1
list1 = []
if select == "MY SQL":
mysqlpool = MSQLPOOl() # [msql,post,sqlse] forloop
try:
config.read("C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\configure\\db_config.ini")
except FileNotFoundError as e:
print(e)
databases = config['DATABASES']['mysql_db_list'] # how
db_lists = databases.split(",") # ['db1',"db2"]
for db in db_lists:
try:
config.read(f"C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\configure\\{db}_config.ini")
except FileNotFoundError as e:
print(e)
try:
schema_df = pd.read_csv(
f"C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\schemafiles\\{db}_config.csv")
except FileNotFoundError as e:
print(e)
db_user = config['Server_Credentials']['db_user']
db_pass = config['Server_Credentials']['db_password']
db_server = config['Server_Credentials']['db_host']
db_name = config['Database']['db_name']
table_name = config['Database']['table_names']
table_names_list = table_name.split(",")
print(table_names_list)
conn = mysqlpool.get_connection(db_server, db_user, db_pass)
cursor = conn.cursor()
len_table = len(table_names_list)
# list1 = []
for table in table_names_list:
print(table)
df = mysqlpool.read_and_prepare_data(db_name, table, conn)
project_id = config['Google']['PROJECT_ID']
dataset_id = config['Google']['DATASET_ID']
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = config['Google']['CREDENTIALS']
get_unique_columns = pd.unique(schema_df['table'])
demo_df = schema_df[schema_df['table'] == table]
schema_df1 = demo_df[['name', 'type', 'mode']]
schema_json = schema_df1.to_dict('records')
table_id = '{0}.{1}'.format(dataset_id, table)
mysqlpool.load_into_bq(df, table_id, project_id, schema_json)
print(f"loaded successfully {table}")
# status=True
client = bigquery.Client()
try:
client.get_table(table_id)
status = True
except NotFound:
status = False
list = [select, db_name, table, status]
list1.append(list)
print(list1)
filename = 'my_data'
outfile = open(filename, 'wb')
pickle.dump(list1, outfile)
outfile.close()
elif select == "POSTGRES":
postgrespool = POSTGRESPOOL()
try:
config.read(
"C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\configure\\db_config.ini")
except FileNotFoundError as e:
print(e)
databases = config['DATABASES']['postgres_db_list']
db_lists = databases.split(",")
for db in db_lists:
try:
config.read(
f"C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\configure\\{db}_config.ini")
except FileNotFoundError as e:
print(e)
try:
schema_df = pd.read_csv(
f"C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\schemafiles\\{db}_config.csv")
except FileNotFoundError as e:
print(e)
db_user = config['Server_Credentials']['db_user']
db_pass = config['Server_Credentials']['db_password']
db_server = config['Server_Credentials']['db_host']
db_name = config['Database']['db_name']
table_name = config['Database']['table_names']
table_names_list = table_name.split(",")
conn = postgrespool.get_connection(db_server, db_user, db_pass, db_name)
cursor = conn.cursor()
for table in table_names_list:
print(table)
df = postgrespool.read_and_prepare_data(table, conn)
project_id = config['Google']['PROJECT_ID']
dataset_id = config['Google']['DATASET_ID']
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = config['Google']['CREDENTIALS']
get_unique_columns = pd.unique(schema_df['table'])
demo_df = schema_df[schema_df['table'] == table]
schema_df1 = demo_df[['name', 'type', 'mode']]
schema_json = schema_df1.to_dict('records')
table_id = '{0}.{1}'.format(dataset_id, table)
postgrespool.load_into_bq(df, table_id, project_id, schema_json)
print(f"loaded successfully {table}")
client = bigquery.Client()
try:
client.get_table(table_id)
status = True
except NotFound:
status = False
list = [select, db_name, table, status]
list1.append(list)
print(list1)
filename = 'my_data'
outfile = open(filename, 'wb')
pickle.dump(list1, outfile)
outfile.close()
elif select == "SQL SERVER":
sqlserverpool = SQLSERVERPOOL()
try:
config.read("C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\configure\\db_config.ini")
except FileNotFoundError as e:
print(e)
databases = config['DATABASES']['sqlserver_db_list']
db_lists = databases.split(",")
for db in db_lists:
try:
config.read(f"C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\configure\\{db}_config.ini")
except FileNotFoundError as e:
print(e)
try:
schema_df = pd.read_csv(
f"C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\schemafiles\\{db}_config.csv")
except FileNotFoundError as e:
print(e)
db_name = config['Database']['db_name']
table_name = config['Database']['table_names']
table_names_list = table_name.split(",")
conn = sqlserverpool.get_connection(db_name)
cursor = conn.cursor()
for table in table_names_list:
print(table)
df = sqlserverpool.read_and_prepare_data(db_name, table, conn)
project_id = config['Google']['PROJECT_ID']
dataset_id = config['Google']['DATASET_ID']
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = config['Google']['CREDENTIALS']
get_unique_columns = pd.unique(schema_df['table'])
demo_df = schema_df[schema_df['table'] == table]
schema_df1 = demo_df[['name', 'type', 'mode']]
schema_json = schema_df1.to_dict('records')
table_id = '{0}.{1}'.format(dataset_id, table)
sqlserverpool.load_into_bq(df, table_id, project_id, schema_json)
print(f"loaded successfully {table}")
client = bigquery.Client()
try:
client.get_table(table_id)
status = True
except NotFound:
status = False
list = [select, db_name, table, status]
list1.append(list)
print(list1)
filename = 'my_data'
outfile = open(filename, 'wb')
pickle.dump(list1, outfile)
outfile.close()
def alist(self):
global list1
return list1
\ No newline at end of file
import pandas_gbq as pd_gbq
import pandas as pd
import pymysql
class MSQLPOOl:
def get_connection(self,db_host,db_user,db_pass):
try:
conn = pymysql.connect(
host=db_host,
user=db_user,
password=db_pass,
)
except Exception as e:
print(e)
else:
return conn
def read_and_prepare_data(self,db_name,table,conn):
try:
query = "SELECT * FROM {0}.{1} ;".format(db_name,table)
df = pd.read_sql(query,conn)
except Exception as e:
print(e)
else:
return df
def load_into_bq(self,df,table_id,project_id,schema_json):
try:
pd_gbq.to_gbq(df,table_id,
project_id=project_id,
table_schema=schema_json,
if_exists='replace')
except Exception as e:
print(e)
import pandas_gbq as pd_gbq
import pandas as pd
import psycopg2
class POSTGRESPOOL:
def get_connection(self,db_host,db_user,db_pass,db_name):
try:
conn = psycopg2.connect(
host=db_host,
user=db_user,
password=db_pass,
database= db_name
)
except Exception as e:
print(e)
else:
return conn
def read_and_prepare_data(self,table,conn):
try:
query = "SELECT * FROM {0};".format(table)
df = pd.read_sql(query,conn)
except Exception as e:
print(e)
else:
return df
def load_into_bq(self,df,table_id,project_id,schema_json):
try:
pd_gbq.to_gbq(df,table_id,
project_id=project_id,
table_schema=schema_json,
if_exists='replace')
except Exception as e:
print(e)
import configparser
import os
import pickle
import pandas as pd
from flask import request
from google.cloud import bigquery
from google.cloud.exceptions import NotFound
from onpremisesdatabases.postgres1 import POSTGRESPOOL
class PQ:
def POSTGRESPOOl(self):
config = configparser.ConfigParser()
select = request.form.get('comp_select')
global list1
list1 = []
if select == "POSTGRES":
postgrespool = POSTGRESPOOL()
try:
config.read("C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\configure\\db_config.ini")
except FileNotFoundError as e:
print(e)
databases = config['DATABASES']['postgres_db_list']
db_lists = databases.split(",") # ['db1',"db2"]
for db in db_lists:
try:
config.read(f"C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\configure\\{db}_config.ini")
                except FileNotFoundError as e:
print(e)
try:
schema_df = pd.read_csv(
f"C:\\Users\\mfatima\\PycharmProjects\\flaskdatabasemigrations\\schemafiles\\{db}_config.csv")
except FileNotFoundError as e:
print(e)
db_user = config['Server_Credentials']['db_user']
db_pass = config['Server_Credentials']['db_password']
db_server = config['Server_Credentials']['db_host']
db_name = config['Database']['db_name']
table_name = config['Database']['table_names']
table_names_list = table_name.split(",")
print(table_names_list)
                conn = postgrespool.get_connection(db_server, db_user, db_pass, db_name)  # POSTGRESPOOL also needs the database name
cursor = conn.cursor()
len_table = len(table_names_list)
# list1 = []
for table in table_names_list:
print(table)
                    df = postgrespool.read_and_prepare_data(table, conn)  # POSTGRESPOOL queries the connected database directly
project_id = config['Google']['PROJECT_ID']
dataset_id = config['Google']['DATASET_ID']
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = config['Google']['CREDENTIALS']
get_unique_columns = pd.unique(schema_df['table'])
demo_df = schema_df[schema_df['table'] == table]
schema_df1 = demo_df[['name', 'type', 'mode']]
schema_json = schema_df1.to_dict('records')
table_id = '{0}.{1}'.format(dataset_id, table)
postgrespool.load_into_bq(df, table_id, project_id, schema_json)
print(f"loaded successfully {table}")
# status=True
client = bigquery.Client()
try:
client.get_table(table_id)
status = True
except NotFound:
status = False
list = [select, db_name, table, status]
list1.append(list)
print(list1)
filename = 'my_data'
outfile = open(filename, 'wb')
pickle.dump(list1, outfile)
outfile.close()
else:
print("Selected None");
def alist(self):
global list1
return list1
\ No newline at end of file
import pandas_gbq as pd_gbq
import pandas as pd
import pyodbc
class SQLSERVERPOOL:
def get_connection(self,db_name):
try:
pyodbc.drivers()
            conn = pyodbc.connect(r'DRIVER=SQL Server;SERVER=HYD-LAP-0370\SQLEXPRESS;DATABASE={0};Trusted_Connection=yes;'.format(db_name))
except Exception as e:
print(e)
else:
return conn
def read_and_prepare_data(self,db_name,table,conn):
try:
query = "SELECT * FROM {0}.dbo.{1};".format(db_name,table)
df = pd.read_sql(query,conn)
except Exception as e:
print(e)
else:
return df
def load_into_bq(self,df,table_id,project_id,schema_json):
try:
pd_gbq.to_gbq(df,table_id,
project_id=project_id,
table_schema=schema_json,
if_exists='replace')
except Exception as e:
print(e)