Use this decorator to cache functions that have a predefined first arg. enable_cache is treated as True by default, unless enable_cache = False is passed to the decorated function. force controls whether to force-refresh the cache and is treated as False by default, unless force = True is passed to the dec...
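A minimal sketch of such a decorator, assuming a flask-cache-style object with get/set methods; the dict-backed cache and all names here are illustrative, not the shipped implementation:

    import functools

    class DictCache:  # stand-in for a real cache backend (illustrative only)
        def __init__(self):
            self._store = {}
        def get(self, key):
            return self._store.get(key)
        def set(self, key, value):
            self._store[key] = value

    cache = DictCache()

    def memoized_func(key_prefix):
        """Cache on the predefined first positional arg; names are hypothetical."""
        def wrap(f):
            @functools.wraps(f)
            def wrapper(*args, **kwargs):
                enable_cache = kwargs.pop('enable_cache', True)
                force = kwargs.pop('force', False)
                if not enable_cache:
                    return f(*args, **kwargs)
                cache_key = '{}:{}'.format(key_prefix, args[0] if args else '')
                obj = cache.get(cache_key)
                if obj is None or force:
                    obj = f(*args, **kwargs)
                    cache.set(cache_key, obj)
                return obj
            return wrapper
        return wrap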
Name property def name(self): """Name property""" ts = datetime.now().isoformat() ts = ts.replace('-', '').replace(':', '').split('.')[0] tab = (self.tab_name.replace(' ', '_').lower() if self.tab_name else 'notab') tab = re.sub(r'\W+', '', tab) return f's...
Check if user can access a cached response from explore_json. This function takes `self` since it must have the same signature as the decorated method. def check_datasource_perms(self, datasource_type=None, datasource_id=None): """ Check if user can access a cached response from explore_json. ...
Check if user can access a cached response from slice_json. This function takes `self` since it must have the same signature as the decorated method. def check_slice_perms(self, slice_id): """ Check if user can access a cached response from slice_json. This function takes `self` since it must...
Applies the configuration's HTTP headers to all responses def apply_caching(response): """Applies the configuration's HTTP headers to all responses""" for k, v in config.get('HTTP_HEADERS').items(): response.headers[k] = v return response
Updates the role with the given datasource permissions. Permissions not in the request will be revoked. This endpoint should be available to admins only. Expects JSON in the format: { 'role_name': '{role_name}', 'database': [{ 'datasource_type': '{t...
Serves all requests that GET or POST form_data This endpoint evolved to be the entry point of many different requests that GET or POST a form_data. `self.generate_json` receives this input and returns different payloads based on the request args in the first block TODO: break...
Overrides the dashboards using json instances from the file. def import_dashboards(self): """Overrides the dashboards using json instances from the file.""" f = request.files.get('file') if request.method == 'POST' and f: dashboard_import_export.import_dashboards(db.session, f.strea...
Deprecated endpoint, here for backward compatibility of urls def explorev2(self, datasource_type, datasource_id): """Deprecated endpoint, here for backward compatibility of urls""" return redirect(url_for( 'Superset.explore', datasource_type=datasource_type, datasour...
Endpoint to retrieve values for specified column. :param datasource_type: Type of datasource e.g. table :param datasource_id: Datasource id :param column: Column name to retrieve values for :return: def filter(self, datasource_type, datasource_id, column): """ Endpoint ...
Save or overwrite a slice def save_or_overwrite_slice( self, args, slc, slice_add_perm, slice_overwrite_perm, slice_download_perm, datasource_id, datasource_type, datasource_name): """Save or overwrite a slice""" slice_name = args.get('slice_name') action = args.get('act...
endpoint for checking/unchecking any boolean in a sqla model def checkbox(self, model_view, id_, attr, value): """endpoint for checking/unchecking any boolean in a sqla model""" modelview_to_model = { '{}ColumnInlineView'.format(name.capitalize()): source.column_class for name, ...
Endpoint to fetch the list of tables for given database def tables(self, db_id, schema, substr, force_refresh='false'): """Endpoint to fetch the list of tables for given database""" db_id = int(db_id) force_refresh = force_refresh.lower() == 'true' schema = utils.js_string_to_python(sch...
Copy dashboard def copy_dash(self, dashboard_id): """Copy dashboard""" session = db.session() data = json.loads(request.form.get('data')) dash = models.Dashboard() original_dash = ( session .query(models.Dashboard) .filter_by(id=dashboard_id)....
Save a dashboard's metadata def save_dash(self, dashboard_id): """Save a dashboard's metadata""" session = db.session() dash = (session .query(models.Dashboard) .filter_by(id=dashboard_id).first()) check_ownership(dash, raise_if_false=True) data =...
Add and save slices to a dashboard def add_slices(self, dashboard_id): """Add and save slices to a dashboard""" data = json.loads(request.form.get('data')) session = db.session() Slice = models.Slice # noqa dash = ( session.query(models.Dashboard).filter_by(id=dashb...
Recent activity (actions) for a given user def recent_activity(self, user_id): """Recent activity (actions) for a given user""" M = models # noqa if request.args.get('limit'): limit = int(request.args.get('limit')) else: limit = 1000 qry = ( ...
This lets us use a user's username to pull favourite dashboards def fave_dashboards_by_username(self, username): """This lets us use a user's username to pull favourite dashboards""" user = security_manager.find_user(username=username) return self.fave_dashboards(user.get_id())
List of slices a user created, or faved def user_slices(self, user_id=None): """List of slices a user created, or faved""" if not user_id: user_id = g.user.id Slice = models.Slice # noqa FavStar = models.FavStar # noqa qry = ( db.session.query(Slice, ...
List of slices created by this user def created_slices(self, user_id=None): """List of slices created by this user""" if not user_id: user_id = g.user.id Slice = models.Slice # noqa qry = ( db.session.query(Slice) .filter( sqla.or_( ...
Favorite slices for a user def fave_slices(self, user_id=None): """Favorite slices for a user""" if not user_id: user_id = g.user.id qry = ( db.session.query( models.Slice, models.FavStar.dttm, ) .join( ...
Warms up the cache for the slice or table. Note for slices a force refresh occurs. def warm_up_cache(self): """Warms up the cache for the slice or table. Note for slices a force refresh occurs. """ slices = None session = db.session() slice_id = request.args.ge...
Toggle favorite stars on Slices and Dashboard def favstar(self, class_name, obj_id, action): """Toggle favorite stars on Slices and Dashboard""" session = db.session() FavStar = models.FavStar # noqa count = 0 favs = session.query(FavStar).filter_by( class_name=clas...
Server side rendering for a dashboard def dashboard(self, dashboard_id): """Server side rendering for a dashboard""" session = db.session() qry = session.query(models.Dashboard) if dashboard_id.isdigit(): qry = qry.filter_by(id=int(dashboard_id)) else: qr...
Syncs the druid datasource in main db with the provided config. The endpoint takes 3 arguments: user - user name to perform the operation as cluster - name of the druid cluster config - configuration stored in json that contains: name: druid datasource name ...
Returns whether a key exists in the cache def cache_key_exist(self, key): """Returns whether a key exists in the cache""" key_exist = True if cache.get(key) else False status = 200 if key_exist else 404 return json_success(json.dumps({'key_exist': key_exist}), status=status)
Serves a key off of the results backend def results(self, key): """Serves a key off of the results backend""" if not results_backend: return json_error_response("Results backend isn't configured") read_from_results_backend_start = now_as_float() blob = results_backend.get(k...
Runs arbitrary SQL and returns the results as JSON def sql_json(self): """Runs arbitrary SQL and returns the results as JSON""" async_ = request.form.get('runAsync') == 'true' sql = request.form.get('sql') database_id = request.form.get('database_id') schema = request.form.get('schema') or None ...
Download the query results as csv. def csv(self, client_id): """Download the query results as csv.""" logging.info('Exporting CSV file [{}]'.format(client_id)) query = ( db.session.query(Query) .filter_by(client_id=client_id) .one() ) rejecte...
Get the updated queries. def queries(self, last_updated_ms): """Get the updated queries.""" stats_logger.incr('queries') if not g.user.get_id(): return json_error_response( 'Please login to access the queries.', status=403) # Unix time, milliseconds. ...
Search for previously run sqllab queries. Used for Sqllab Query Search page /superset/sqllab#search. The custom permission can_only_search_queries_owned restricts results to queries run by the current user. :returns: Response with list of sql query dicts def search_queries(self) -> Respo...
Personalized welcome page def welcome(self): """Personalized welcome page""" if not g.user or not g.user.get_id(): return redirect(appbuilder.get_url_for_login) welcome_dashboard_id = ( db.session .query(UserAttribute.welcome_dashboard_id) .filte...
User profile page def profile(self, username): """User profile page""" if not username and g.user: username = g.user.username payload = { 'user': bootstrap_user_data(username, include_perms=True), 'common': self.common_bootsrap_payload(), } ...
SQL Editor def sqllab(self): """SQL Editor""" d = { 'defaultDbId': config.get('SQLLAB_DEFAULT_DBID'), 'common': self.common_bootsrap_payload(), } return self.render_template( 'superset/basic.html', entry='sqllab', bootstrap_dat...
This method exposes an API endpoint to get the database query string for this slice def slice_query(self, slice_id): """ This method exposes an API endpoint to get the database query string for this slice """ viz_obj = get_viz(slice_id) security_manager.assert_da...
This method exposes an API endpoint to get the schema access control settings for csv upload in this database def schemas_access_for_csv_upload(self): """ This method exposes an API endpoint to get the schema access control settings for csv upload in this database """ if...
Times a block of operations and logs the duration to the stats logger. def stats_timing(stats_key, stats_logger): """Times a block of operations and logs the duration to the stats logger.""" start_ts = now_as_float() try: yield start_ts except Exception as e: raise e finally: stats_logger.timin...
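A compact sketch of the context manager plus a usage example; `now_as_float` is stood in for by `time.time()` in milliseconds, and the stats logger is assumed to expose a `timing(key, ms)` method:

    import time
    from contextlib import contextmanager

    @contextmanager
    def stats_timing(stats_key, stats_logger):
        start_ts = time.time() * 1000.0  # milliseconds, standing in for now_as_float()
        try:
            yield start_ts
        finally:
            stats_logger.timing(stats_key, time.time() * 1000.0 - start_ts)

    class PrintStats:  # illustrative logger with the assumed timing() method
        def timing(self, key, ms):
            print(f'{key}: {ms:.1f} ms')

    with stats_timing('sqllab.query.duration', PrintStats()):
        time.sleep(0.1)  # stand-in workload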
A decorator for caching views and handling etag conditional requests. The decorator adds headers to GET requests that help with caching: Last-Modified, Expires and ETag. It also handles conditional requests, when the client sends an If-None-Match header. If a cache is set, the decorator will cache GET re...
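A minimal sketch of the conditional-request half in plain Flask; werkzeug's make_conditional() answers a matching If-None-Match with 304 (the endpoint and payload below are hypothetical):

    import hashlib
    from flask import Flask, Response, request

    app = Flask(__name__)

    @app.route('/cached')
    def cached():
        body = b'payload'  # hypothetical response body
        resp = Response(body)
        resp.set_etag(hashlib.md5(body).hexdigest())
        # returns 304 Not Modified when the client's If-None-Match matches the ETag
        return resp.make_conditional(request)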
Alters the SQL statement to apply a LIMIT clause def apply_limit_to_sql(cls, sql, limit, database): """Alters the SQL statement to apply a LIMIT clause""" if cls.limit_method == LimitMethod.WRAP_SQL: sql = sql.strip('\t\n ;') qry = ( select('*') ....
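The WRAP_SQL branch boils down to wrapping the original statement in a subselect; a standalone sketch (the alias name is arbitrary):

    def wrap_sql_with_limit(sql: str, limit: int) -> str:
        sql = sql.strip().rstrip(';')
        # most dialects require an alias on the derived table
        return f'SELECT * FROM ({sql}) AS inner_qry LIMIT {limit:d}'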
Modify the SQL Alchemy URL object with the user to impersonate if applicable. :param url: SQLAlchemy URL object :param impersonate_user: Bool indicating if impersonation is enabled :param username: Effective username def modify_url_for_impersonation(cls, url, impersonate_user, username): ...
Conditionally mutate and/or quote a sql column/expression label. If force_column_alias_quotes is set to True, return the label as a sqlalchemy.sql.elements.quoted_name object to ensure that the select query and query results have the same case. Otherwise return the mutated label as a regular...
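A sketch of the quoting half, using sqlalchemy's quoted_name so the result set preserves the label's case (the mutator hook is omitted):

    from sqlalchemy.sql.elements import quoted_name

    def make_label_compatible(label: str, force_column_alias_quotes: bool = True):
        # quote=True forces the dialect to emit the label verbatim, preserving case
        if force_column_alias_quotes:
            return quoted_name(label, True)
        return label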
In the case that a label exceeds the max length supported by the engine, this method is used to construct a deterministic and unique label based on an md5 hash. def truncate_label(cls, label): """ In the case that a label exceeds the max length supported by the engine, this meth...
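A sketch of md5-based truncation under an assumed max length; the exact split between kept prefix and hash is a guess:

    import hashlib

    def truncate_label(label: str, max_len: int = 64) -> str:
        if len(label) <= max_len:
            return label
        digest = hashlib.md5(label.encode('utf-8')).hexdigest()
        # deterministic and unique: leading underscore keeps it a valid identifier
        return ('_' + digest)[:max_len]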
Need to consider foreign tables for PostgreSQL def get_table_names(cls, inspector, schema): """Need to consider foreign tables for PostgreSQL""" tables = inspector.get_table_names(schema) tables.extend(inspector.get_foreign_table_names(schema)) return sorted(tables)
Postgres is unable to identify mixed case column names unless they are quoted. def get_timestamp_column(expression, column_name): """Postgres is unable to identify mixed case column names unless they are quoted.""" if expression: return expression elif column_name.lo...
Extract error message for queries def extract_error_message(cls, e): """Extract error message for queries""" message = str(e) try: if isinstance(e.args, tuple) and len(e.args) > 1: message = e.args[1] except Exception: pass return message
Returns a list of tables [schema1.table1, schema2.table2, ...] Datasource_type can be 'table' or 'view'. An empty schema corresponds to the list of full names of all the tables or views: <schema>.<result_set_name>. def fetch_result_sets(cls, db, datasource_type): """Returns a list of tabl...
Updates progress information def handle_cursor(cls, cursor, query, session): """Updates progress information""" logging.info('Polling the cursor for progress') polled = cursor.poll() # poll returns dict -- JSON status information or ``None`` # if the query is done # http...
Returns a partition query :param table_name: the name of the table to get partitions from :type table_name: str :param limit: the number of partitions to be returned :type limit: int :param order_by: a list of tuples of field name and a boolean that determines if tha...
Uploads a csv file and creates a superset datasource in Hive. def create_table_from_csv(form, table): """Uploads a csv file and creates a superset datasource in Hive.""" def convert_to_hive_type(col_type): """maps tableschema's types to hive types""" tableschema_to_hive_types = ...
Updates progress information def handle_cursor(cls, cursor, query, session): """Updates progress information""" from pyhive import hive # pylint: disable=no-name-in-module unfinished_states = ( hive.ttypes.TOperationState.INITIALIZED_STATE, hive.ttypes.TOperationState.R...
Return a configuration dictionary that can be merged with other configs that can set the correct properties for impersonating users :param uri: URI string :param impersonate_user: Bool indicating if impersonation is enabled :param username: Effective username :return: Dictionary ...
BigQuery field_name should start with a letter or underscore and contain only alphanumeric characters. Labels that start with a number are prefixed with an underscore. Any unsupported characters are replaced with underscores and an md5 hash is added to the end of the label to avoid possible coll...
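A sketch following that description literally (underscore prefix for leading digits, word characters only, a short md5 suffix on any change); the details are assumptions:

    import hashlib
    import re

    def mutate_label(label: str) -> str:
        label_hashed = '_' + hashlib.md5(label.encode('utf-8')).hexdigest()
        # field_name must start with a letter or underscore
        mutated = '_' + label if re.match(r'^\d', label) else label
        # replace anything that is not alphanumeric/underscore
        mutated = re.sub(r'[^\w]+', '_', mutated)
        if mutated != label:
            # add a hash suffix to avoid collisions between mutated labels
            mutated += label_hashed[:6]
        return mutated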
The BigQuery dialect requires that we not use backticks in nested field names. Using literal_column handles that issue. https://docs.sqlalchemy.org/en/latest/core/tutorial.html#using-more-specific-text-with-table-literal-column-and-column Also explicitly specifying column names so ...
Loading time series data from a gzipped JSON file in the repo def load_multiformat_time_series(): """Loading time series data from a gzipped JSON file in the repo""" data = get_example_data('multiformat_time_series.json.gz') pdf = pd.read_json(data) pdf.ds = pd.to_datetime(pdf.ds, unit='s') pdf.ds2 = pd.to_datetime...
Imports dashboards from a stream to databases def import_dashboards(session, data_stream, import_time=None): """Imports dashboards from a stream to databases""" current_tt = int(time.time()) import_time = current_tt if import_time is None else import_time data = json.loads(data_stream.read(), object_ho...
Returns all dashboards metadata as a json dump def export_dashboards(session): """Returns all dashboards metadata as a json dump""" logging.info('Starting export') dashboards = session.query(Dashboard) dashboard_ids = [] for dashboard in dashboards: dashboard_ids.append(dashboard.id) da...
The cache key is made out of the key/values in `query_obj`, plus any other key/values in `extra`. We remove datetime bounds that are hard values, and replace them with the user-provided inputs to bounds, which may be time-relative (as in "5 days ago" or "now"). def cache_key(self, **extra...
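A sketch of the key computation; the bound key names (from_dttm/to_dttm) and the exact normalization are assumptions:

    import hashlib
    import json

    def cache_key(query_obj: dict, **extra) -> str:
        cache_dict = {**query_obj, **extra}
        # drop hard datetime bounds so "5 days ago" maps to a stable key
        for k in ('from_dttm', 'to_dttm'):  # hypothetical key names
            cache_dict.pop(k, None)
        payload = json.dumps(cache_dict, default=str, sort_keys=True)
        return hashlib.md5(payload.encode('utf-8')).hexdigest()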
Local method handling error while processing the SQL def handle_query_error(msg, query, session, payload=None): """Local method handling error while processing the SQL""" payload = payload or {} troubleshooting_link = config['TROUBLESHOOTING_LINK'] query.error_message = msg query.status = QueryStat...
Attempts to get the query, retrying if it cannot be found def get_query(query_id, session, retry_count=5): """Attempts to get the query, retrying if it cannot be found""" query = None attempt = 0 while not query and attempt < retry_count: try: query = session.query(Query).filter_by(id=query_id).one() ...
Provide a transactional scope around a series of operations. def session_scope(nullpool): """Provide a transactional scope around a series of operations.""" if nullpool: engine = sqlalchemy.create_engine( app.config.get('SQLALCHEMY_DATABASE_URI'), poolclass=NullPool) session_class =...
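A standalone sketch of the same pattern, taking the database URI directly instead of reading app config:

    import sqlalchemy
    from contextlib import contextmanager
    from sqlalchemy.orm import sessionmaker
    from sqlalchemy.pool import NullPool

    @contextmanager
    def session_scope(uri: str, nullpool: bool = True):
        engine = (sqlalchemy.create_engine(uri, poolclass=NullPool)
                  if nullpool else sqlalchemy.create_engine(uri))
        session = sessionmaker(bind=engine)()
        try:
            yield session
            session.commit()
        except Exception:
            session.rollback()
            raise
        finally:
            session.close()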
Executes the sql query and returns the results. def get_sql_results( ctask, query_id, rendered_query, return_results=True, store_results=False, user_name=None, start_time=None): """Executes the sql query and returns the results.""" with session_scope(not ctask.request.called_directly) as session: ...
Executes a single SQL statement def execute_sql_statement(sql_statement, query, user_name, session, cursor): """Executes a single SQL statement""" database = query.database db_engine_spec = database.db_engine_spec parsed_query = ParsedQuery(sql_statement) sql = parsed_query.stripped() SQL_MAX_R...
Executes the sql query and returns the results. def execute_sql_statements( ctask, query_id, rendered_query, return_results=True, store_results=False, user_name=None, session=None, start_time=None, ): """Executes the sql query and returns the results.""" if store_results and start_time: # only asynchro...
Flask's flash if available, logging call if not def flasher(msg, severity=None): """Flask's flash if available, logging call if not""" try: flash(msg, severity) except RuntimeError: if severity == 'danger': logging.error(msg) else: logging.info(msg)
Converts a string to an int/float Returns ``None`` if it can't be converted >>> string_to_num('5') 5 >>> string_to_num('5.2') 5.2 >>> string_to_num(10) 10 >>> string_to_num(10.1) 10.1 >>> string_to_num('this is not a string') is None True def string_to_num(s: str): """...
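One way to satisfy those doctests; int is tried before float so '5' comes back as 5, not 5.0 (a sketch, not the shipped implementation):

    from typing import Optional, Union

    def string_to_num(s) -> Optional[Union[int, float]]:
        if isinstance(s, (int, float)):
            return s
        try:
            return int(s)
        except (TypeError, ValueError):
            pass
        try:
            return float(s)
        except (TypeError, ValueError):
            return None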
Returns l without what is in minus >>> list_minus([1, 2, 3], [2]) [1, 3] def list_minus(l: List, minus: List) -> List: """Returns l without what is in minus >>> list_minus([1, 2, 3], [2]) [1, 3] """ return [o for o in l if o not in minus]
Returns ``datetime.datetime`` from human readable strings >>> from datetime import date, timedelta >>> from dateutil.relativedelta import relativedelta >>> parse_human_datetime('2015-04-03') datetime.datetime(2015, 4, 3, 0, 0) >>> parse_human_datetime('2/3/1969') datetime.datetime(1969, 2, 3, 0...
Function to be passed into json.loads object_hook parameter Recreates the dashboard object from a json representation. def decode_dashboards(o): """ Function to be passed into json.loads object_hook parameter Recreates the dashboard object from a json representation. """ import superset.models.core a...
Returns ``datetime.timedelta`` from natural language time deltas >>> parse_human_timedelta('1 day') == timedelta(days=1) True def parse_human_timedelta(s: str): """ Returns ``datetime.timedelta`` from natural language time deltas >>> parse_human_timedelta('1 day') == timedelta(days=1) True """ cal ...
Formats datetime to take less room when it is recent def datetime_f(dttm): """Formats datetime to take less room when it is recent""" if dttm: dttm = dttm.isoformat() now_iso = datetime.now().isoformat() if now_iso[:10] == dttm[:10]: dttm = dttm[11:] elif now_iso[:4]...
json serializer that deals with dates >>> dttm = datetime(1970, 1, 1) >>> json.dumps({'dttm': dttm}, default=json_iso_dttm_ser) '{"dttm": "1970-01-01T00:00:00"}' def json_iso_dttm_ser(obj, pessimistic: Optional[bool] = False): """ json serializer that deals with dates >>> dttm = datetime(1970...
json serializer that deals with dates def json_int_dttm_ser(obj): """json serializer that deals with dates""" val = base_json_conv(obj) if val is not None: return val if isinstance(obj, (datetime, pd.Timestamp)): obj = datetime_to_epoch(obj) elif isinstance(obj, date): obj =...
Translate exception into error message Databases have different ways of handling exceptions. This function attempts to make sense of the exception object and construct a human readable sentence. TODO(bkyryliuk): parse the Presto error message from the connection created via create_eng...
Utility to find a constraint name in alembic migrations def generic_find_constraint_name(table, columns, referenced, db): """Utility to find a constraint name in alembic migrations""" t = sa.Table(table, db.metadata, autoload=True, autoload_with=db.engine) for fk in t.foreign_key_constraints: if f...
Utility to find a foreign-key constraint name in alembic migrations def generic_find_fk_constraint_name(table, columns, referenced, insp): """Utility to find a foreign-key constraint name in alembic migrations""" for fk in insp.get_foreign_keys(table): if fk['referred_table'] == referenced and set(fk['...
Utility to find foreign-key constraint names in alembic migrations def generic_find_fk_constraint_names(table, columns, referenced, insp): """Utility to find foreign-key constraint names in alembic migrations""" names = set() for fk in insp.get_foreign_keys(table): if fk['referred_table'] == refer...
Utility to find a unique constraint name in alembic migrations def generic_find_uq_constraint_name(table, columns, insp): """Utility to find a unique constraint name in alembic migrations""" for uq in insp.get_unique_constraints(table): if columns == set(uq['column_names']): return uq['nam...
Utility to check whether a named constraint exists on a table in alembic migrations def table_has_constraint(table, name, db): """Utility to check whether a named constraint exists on a table in alembic migrations""" t = sa.Table(table, db.metadata, autoload=True, autoload_with=db.engine) for c in t.constraints: if c.name == name: return Tr...
Send an email with html content, eg: send_email_smtp( 'test@example.com', 'foo', '<b>Foo</b> bar',['/dev/null'], dryrun=True) def send_email_smtp(to, subject, html_content, config, files=None, data=None, images=None, dryrun=False, cc=None, bcc=None, mime_subtype='mix...
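A dryrun-friendly sketch of the multipart assembly; the sender address and host are placeholders, and attachments/images are omitted:

    import smtplib
    from email.mime.multipart import MIMEMultipart
    from email.mime.text import MIMEText

    def send_email_smtp(to, subject, html_content, host='localhost', dryrun=True):
        msg = MIMEMultipart('mixed')
        msg['Subject'] = subject
        msg['From'] = 'superset@example.com'  # placeholder sender
        msg['To'] = to
        msg.attach(MIMEText(html_content, 'html'))
        if dryrun:
            print(msg.as_string())
            return
        with smtplib.SMTP(host) as server:
            server.sendmail(msg['From'], [to], msg.as_string())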
Set up the flask-cache on a flask app def setup_cache(app: Flask, cache_config) -> Optional[Cache]: """Set up the flask-cache on a flask app""" if cache_config and cache_config.get('CACHE_TYPE') != 'null': return Cache(app, config=cache_config) return None
Compress things in a py2/3 safe fashion >>> json_str = '{"test": 1}' >>> blob = zlib_compress(json_str) def zlib_compress(data): """ Compress things in a py2/3 safe fashion >>> json_str = '{"test": 1}' >>> blob = zlib_compress(json_str) """ if PY3K: if isinstance(data, str): ...
Decompress things to a string in a py2/3 safe fashion >>> json_str = '{"test": 1}' >>> blob = zlib_compress(json_str) >>> got_str = zlib_decompress_to_string(blob) >>> got_str == json_str True def zlib_decompress_to_string(blob): """ Decompress things to a string in a py2/3 safe fashion ...
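On Python 3 both helpers reduce to encode-then-compress and decompress-then-decode; a sketch satisfying the doctests above:

    import zlib

    def zlib_compress(data) -> bytes:
        if isinstance(data, str):
            data = data.encode('utf-8')
        return zlib.compress(data)

    def zlib_decompress_to_string(blob: bytes) -> str:
        return zlib.decompress(blob).decode('utf-8')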
Given a user ORM FAB object, returns a label def user_label(user: User) -> Optional[str]: """Given a user ORM FAB object, returns a label""" if user: if user.first_name and user.last_name: return user.first_name + ' ' + user.last_name else: return user.username retu...
Return `since` and `until` date time tuple from string representations of time_range, since, until and time_shift. This function supports both reading the keys separately (from `since` and `until`), as well as the new `time_range` key. Valid formats are: - ISO 8601 - X days/years/hours/day...
Backwards compatibility hack. Without this slices with since: 7 days will be treated as 7 days in the future. :param str since: :returns: Since with ago added if necessary :rtype: str def add_ago_to_since(since: str) -> str: """ Backwards compatibility hack. Without this slices with since: 7 d...
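A sketch of the hack: bare relative offsets like '7 days' get 'ago' appended so they point into the past (the accepted time grains are a guess):

    import re

    def add_ago_to_since(since: str) -> str:
        pattern = r'^\s*\d+\s+(second|minute|hour|day|week|month|year)s?\s*$'
        if re.match(pattern, since):
            since += ' ago'
        return since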
Mutates form data to restructure the adhoc filters in the form of the four base filters, `where`, `having`, `filters`, and `having_filters`, which represent free-form WHERE SQL, free-form HAVING SQL, structured where clauses and structured having clauses. def split_adhoc_filters_into_base_filters(fd): "...
Loads an energy related dataset to use with sankey and graphs def load_energy(): """Loads an energy related dataset to use with sankey and graphs""" tbl_name = 'energy_usage' data = get_example_data('energy.json.gz') pdf = pd.read_json(data) pdf.to_sql( tbl_name, db.engine, ...
Loading random time series data from a gzipped JSON file in the repo def load_random_time_series_data(): """Loading random time series data from a gzipped JSON file in the repo""" data = get_example_data('random_time_series.json.gz') pdf = pd.read_json(data) pdf.ds = pd.to_datetime(pdf.ds, unit='s') pdf.to_sql( ...
Starts a Superset web server. def runserver(debug, console_log, use_reloader, address, port, timeout, workers, socket): """Starts a Superset web server.""" debug = debug or config.get('DEBUG') or console_log if debug: print(Fore.BLUE + '-=' * 20) print( Fore.YELLOW + 'Starting S...
Prints the current version number def version(verbose): """Prints the current version number""" print(Fore.BLUE + '-=' * 15) print(Fore.YELLOW + 'Superset ' + Fore.CYAN + '{version}'.format( version=config.get('VERSION_STRING'))) print(Fore.BLUE + '-=' * 15) if verbose: print('[DB] ...
Refresh druid datasources def refresh_druid(datasource, merge): """Refresh druid datasources""" session = db.session() from superset.connectors.druid.models import DruidCluster for cluster in session.query(DruidCluster).all(): try: cluster.refresh_datasources(datasource_name=datasou...
Import dashboards from JSON def import_dashboards(path, recursive): """Import dashboards from JSON""" p = Path(path) files = [] if p.is_file(): files.append(p) elif p.exists() and not recursive: files.extend(p.glob('*.json')) elif p.exists() and recursive: files.extend(p...
Export dashboards to JSON def export_dashboards(print_stdout, dashboard_file): """Export dashboards to JSON""" data = dashboard_import_export.export_dashboards(db.session) if print_stdout or not dashboard_file: print(data) if dashboard_file: logging.info('Exporting dashboards to %s', da...
Import datasources from YAML def import_datasources(path, sync, recursive): """Import datasources from YAML""" sync_array = sync.split(',') p = Path(path) files = [] if p.is_file(): files.append(p) elif p.exists() and not recursive: files.extend(p.glob('*.yaml')) files.e...
Export datasources to YAML def export_datasources(print_stdout, datasource_file, back_references, include_defaults): """Export datasources to YAML""" data = dict_import_export.export_to_dict( session=db.session, recursive=True, back_references=back_references, ...
Export datasource YAML schema to stdout def export_datasource_schema(back_references): """Export datasource YAML schema to stdout""" data = dict_import_export.export_schema_to_dict( back_references=back_references) yaml.safe_dump(data, stdout, default_flow_style=False)
Refresh sqllab datasources cache def update_datasources_cache(): """Refresh sqllab datasources cache""" from superset.models.core import Database for database in db.session.query(Database).all(): if database.allow_multi_schema_metadata_fetch: print('Fetching {} datasources ...'.format(d...
Starts a Superset worker for async SQL query execution. def worker(workers): """Starts a Superset worker for async SQL query execution.""" logging.info( "The 'superset worker' command is deprecated. Please use the 'celery " "worker' command instead.") if workers: celery_app.conf.upd...
Runs a Celery Flower web server Celery Flower is a UI to monitor the Celery operation on a given broker def flower(port, address): """Runs a Celery Flower web server Celery Flower is a UI to monitor the Celery operation on a given broker""" BROKER_URL = celery_app.conf.BROKER_URL cmd = ( ...
Loading flights data from a gzipped CSV file in the repo def load_flights(): """Loading flights data from a gzipped CSV file in the repo""" tbl_name = 'flights' data = get_example_data('flight_data.csv.gz', make_bytes=True) pdf = pd.read_csv(data, encoding='latin-1') # Loading airports info ...