--- /dev/null
+# Byte-compiled / optimized / DLL files
+__pycache__/
+
+*.py[cod]
+*$py.class
+data/
+archives/
+
+bonnes_pratiques.txt
+celerybeat*
+.~lock*
--- /dev/null
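+# Targets:
+#   make run        - launch main.py
+#   make clean-db   - drop and re-create the PostgreSQL database
+#   make clean-data - wipe and restore the data directory
+#   make clean-all  - both of the above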
+run:
+ #@python -m test_celery.run_tasks
+ @python main.py
+
+clean-db:
+ @python extomeAI/lib/cleaner.py db
+
+clean-data:
+ @python extomeAI/lib/cleaner.py data
+
+clean-all:
+ @python extomeAI/lib/cleaner.py all
+
--- /dev/null
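+# Flags read by extomeAI/source/meteofrance.py at start-up:
+#   regenerate - wipe and re-create the data/meteo_france directory tree
+#   reinsert   - purge the METEO_* tables and reload them from scratch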
+[meteofrance]
+regenerate = False
+reinsert = True
--- /dev/null
+abbreviation,METFT_NAME,unit,type,PARAM_ID_PARAMETER
+t,temperature,K,real,1
+pres,pressure,Pa,integer,1
+tend,pressureVariation,Pa,integer,1
+cod_tend,BarometricTrend,code,integer,2
+u,humidity,%,integer,1
+td,dewPoint,K,real,1
+rr1,lastHourRainfall,mm,real,1
+rr3,last3HoursRainfall,mm,real,1
+ff,meanWindSpeed10min,m/s,real,1
+dd,meanWindDirection10min,degree,integer,1
+rafper,gustsOverAPeriod,m/s,real,1
+vv,horizontalVisibility,m,real,1
+ww,currentWeather,code,integer,2
--- /dev/null
+PARAM_NAME
+Numerical
+Categorical
+Both
--- /dev/null
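+# Loaded at start-up via logging.config.fileConfig('config/logging.cfg')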
+[loggers]
+keys=root
+
+[handlers]
+keys=stream_handler
+
+[formatters]
+keys=formatter
+
+[logger_root]
+level=DEBUG
+handlers=stream_handler
+
+[handler_stream_handler]
+class=StreamHandler
+level=DEBUG
+formatter=formatter
+args=(sys.stderr,)
+
+[formatter_formatter]
+format=%(asctime)s %(name)-12s %(levelname)-8s %(message)s
\ No newline at end of file
--- /dev/null
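+# PostgreSQL connection settings read by extomeAI/lib/connector.py and
+# extomeAI/lib/cleaner.py. No password is stored here: authentication is
+# assumed to rely on PostgreSQL defaults such as peer auth or ~/.pgpass.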
+[postgresql]
+host = localhost
+user = christophe
+port = 5432
+dbname = extome
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+CAUTION: Do not modify this file unless you know what you are doing.
+ Unexpected results may occur if the code is changed deliberately.
+-->
+<dbmodel pgmodeler-ver="0.9.2" last-position="0,89" last-zoom="1" max-obj-count="4"
+ default-schema="public" default-owner="postgres">
+<database name="extome" is-template="false" allow-conns="true">
+</database>
+
+<schema name="public" layer="0" fill-color="#e1e1e1" sql-disabled="true">
+</schema>
+
+<table name="METEO_STATION" layer="0" collapse-mode="2" max-obj-count="6">
+ <schema name="public"/>
+ <role name="postgres"/>
+ <position x="660" y="540"/>
+ <column name="METST_ID" not-null="true"
+ identity-type="BY DEFAULT">
+ <type name="smallint" length="0"/>
+ </column>
+ <column name="METST_NAME" not-null="true">
+ <type name="text" length="0"/>
+ </column>
+ <column name="METST_IDNAME" not-null="true">
+ <type name="text" length="0"/>
+ </column>
+ <column name="METST_LOCATION" not-null="true">
+ <type name="point" length="0"/>
+ </column>
+ <column name="METST_TIMEZONE" not-null="true">
+ <type name="smallint" length="0"/>
+ </column>
+ <constraint name="METEO_STATION_pk" type="pk-constr" table="public."METEO_STATION"">
+ <columns names="METST_ID" ref-type="src-columns"/>
+ </constraint>
+</table>
+
+<table name="METEO_FEATURE" layer="0" collapse-mode="2" max-obj-count="3">
+ <schema name="public"/>
+ <role name="postgres"/>
+ <position x="640" y="280"/>
+ <column name="METFT_ID" not-null="true"
+ identity-type="BY DEFAULT">
+ <type name="smallint" length="0"/>
+ </column>
+ <column name="METFT_NAME" not-null="true">
+ <type name="text" length="0"/>
+ </column>
+ <constraint name="METEO_FEATURE_pk" type="pk-constr" table="public."METEO_FEATURE"">
+ <columns names="METFT_ID" ref-type="src-columns"/>
+ </constraint>
+</table>
+
+<table name="METEO_FEATURE_VALUE" layer="0" collapse-mode="2" max-obj-count="6">
+ <schema name="public"/>
+ <role name="postgres"/>
+ <position x="140" y="480"/>
+ <column name="METVA_ID" not-null="true"
+ identity-type="ALWAYS">
+ <type name="bigint" length="0"/>
+ </column>
+ <column name="METVA_VALUE">
+ <type name="numeric" length="0"/>
+ </column>
+ <column name="METVA_DATETIME" not-null="true">
+ <type name="timestamptz" length="0"/>
+ </column>
+ <constraint name="METEO_FEATURE_VALUE_pk" type="pk-constr" table="public."METEO_FEATURE_VALUE"">
+ <columns names="METVA_ID" ref-type="src-columns"/>
+ </constraint>
+</table>
+
+<table name="PARAMETER" layer="0" collapse-mode="2" max-obj-count="2">
+ <schema name="public"/>
+ <role name="postgres"/>
+ <position x="1120" y="300"/>
+ <column name="PARAM_ID" not-null="true"
+ identity-type="ALWAYS">
+ <type name="smallint" length="0"/>
+ </column>
+ <column name="PARAM_NAME" not-null="true">
+ <type name="text" length="0"/>
+ </column>
+ <constraint name="PARAMETER_pk" type="pk-constr" table="public."PARAMETER"">
+ <columns names="PARAM_ID" ref-type="src-columns"/>
+ </constraint>
+</table>
+
+<relationship name="METEO_FEATURE_has_many_METEO_FEATURE_VALUE" type="rel1n" layer="0"
+ src-col-pattern="{sc}_{st}"
+ pk-pattern="{dt}_pk" uq-pattern="{dt}_uq"
+ src-fk-pattern="{st}_fk"
+ custom-color="#4ffef3"
+	 src-table="public.&quot;METEO_FEATURE&quot;"
+	 dst-table="public.&quot;METEO_FEATURE_VALUE&quot;"
+ src-required="false" dst-required="false">
+ <label ref-type="name-label">
+ <position x="-9.94066" y="1.0862"/>
+ </label>
+</relationship>
+
+<relationship name="METEO_STATION_has_many_METEO_FEATURE_VALUE" type="rel1n" layer="0"
+ src-col-pattern="{sc}_{st}"
+ pk-pattern="{dt}_pk" uq-pattern="{dt}_uq"
+ src-fk-pattern="{st}_fk"
+ custom-color="#2f4da6"
+	 src-table="public.&quot;METEO_STATION&quot;"
+	 dst-table="public.&quot;METEO_FEATURE_VALUE&quot;"
+ src-required="false" dst-required="false"/>
+
+<relationship name="PARAMETER_has_many_METEO_FEATURE" type="rel1n" layer="0"
+ src-col-pattern="{sc}_{st}"
+ pk-pattern="{dt}_pk" uq-pattern="{dt}_uq"
+ src-fk-pattern="{st}_fk"
+ custom-color="#d02c85"
+	 src-table="public.&quot;PARAMETER&quot;"
+	 dst-table="public.&quot;METEO_FEATURE&quot;"
+ src-required="false" dst-required="false"/>
+
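+<!-- With the {sc}_{st} source-column pattern above, these relationships
+     generate the foreign-key columns METFT_ID_METEO_FEATURE,
+     METST_ID_METEO_STATION and PARAM_ID_PARAMETER referenced by the
+     Python loaders. -->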
+</dbmodel>
--- /dev/null
+from .meteofrance import MeteoFrance
+
+from extomeAI.lib.connector import PostgreSQLDBConnection
+
+from csv import DictReader
+from logging.config import fileConfig
+from pathlib import Path
+
+import logging
+
+fileConfig((Path.cwd() / 'config') / 'logging.cfg')
+logger = logging.getLogger()
+
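+# Seed the PARAMETER table from config/features/parameters.csv on first
+# import, if the table is still empty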
+with PostgreSQLDBConnection.Instance() as db:
+ db.cursor.execute('SELECT count(*) FROM "PARAMETER";')
+ nb_parameters = db.cursor.fetchone()[0]
+ if not nb_parameters:
+ logger.info('Inserting PARAMETER values from parameters.csv')
+ csv_file = Path.cwd() / 'config' / 'features' / 'parameters.csv'
+        with open(csv_file, "r") as f:
+            reader = DictReader(f, delimiter=',')
+            for row in reader:
+                # Parameterized query avoids SQL injection and quoting issues
+                request = 'INSERT INTO "PARAMETER" ("PARAM_NAME") VALUES (%s);'
+                db.cursor.execute(request, (row['PARAM_NAME'],))
--- /dev/null
+from extomeAI.lib.connector import PostgreSQLDBConnection
+
+from configparser import ConfigParser
+from csv import DictReader
+from datetime import datetime, timedelta
+from os import remove, listdir
+from pathlib import Path
+from shutil import rmtree
+from timezonefinder import TimezoneFinder
+
+from logging.config import fileConfig
+from os.path import isfile, basename
+from urllib.request import urlretrieve
+
+import logging
+import gzip
+import pytz
+import tempfile
+
+fileConfig((Path.cwd() / 'config') / 'logging.cfg')
+logger = logging.getLogger()
+
+class MeteoFrance:
+ def __init__(self):
+ '''
+ Constructor
+ See: https://donneespubliques.meteofrance.fr/?fond=produit&id_produit=90&id_rubrique=32
+ '''
+ self.__data_directory = (Path.cwd() / 'data') / 'meteo_france'
+ # Re-creating data directory architecture for MeteoFrance, if asked
+ config = ConfigParser()
+ config.read((Path.cwd() / 'config') / 'features.cfg')
+        if config.getboolean('meteofrance', 'regenerate'):
+            logger.info("Regenerating meteofrance data directory")
+            # ignore_errors avoids a bare except when the directory does not exist yet
+            rmtree(self.__data_directory, ignore_errors=True)
+        (self.__data_directory / 'historical').mkdir(exist_ok=True, parents=True)
+        (self.__data_directory / 'config').mkdir(exist_ok=True, parents=True)
+        if config.getboolean('meteofrance', 'reinsert'):
+ logger.info("Reinserting meteofrance database")
+ with PostgreSQLDBConnection.Instance() as db:
+ db.cursor.execute(f'DELETE FROM "METEO_FEATURE";')
+ db.cursor.execute(f'ALTER SEQUENCE "METEO_FEATURE_METFT_ID_seq" RESTART WITH 1;')
+ db.cursor.execute(f'DELETE FROM "METEO_STATION";')
+ db.cursor.execute(f'ALTER SEQUENCE "METEO_STATION_METST_ID_seq" RESTART WITH 1;')
+ db.cursor.execute(f'DELETE FROM "METEO_FEATURE_VALUE";')
+ db.cursor.execute(f'ALTER SEQUENCE "METEO_FEATURE_VALUE_METVA_ID_seq" RESTART WITH 1;')
+ self.__generate()
+
+
+ def __collect_stations(self):
+        '''
+        Fill the METEO_STATION table from the MeteoFrance station list
+        '''
+ tf = TimezoneFinder(in_memory=True)
+
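+        # postesSynop.csv is semicolon-separated; the columns used below
+        # are ID, Nom, Latitude and Longitude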
+ with PostgreSQLDBConnection.Instance() as db:
+            link = 'https://donneespubliques.meteofrance.fr/donnees_libres/Txt/Synop/postesSynop.csv'
+            p = self.__data_directory / 'config'
+            csv_file = p / basename(link)
+ if not isfile(csv_file):
+ logger.info('Downloading location stations from MeteoFrance')
+ urlretrieve(link, csv_file)
+ with open(csv_file, "r") as f:
+ reader = DictReader(f, delimiter=';')
+ logger.info(f'Inserting location stations in {db.dbname} database')
+ for row in reader:
+                # float() is safer than eval() for parsing numeric CSV fields
+                longitude, latitude = float(row['Longitude']), float(row['Latitude'])
+                timezone_name = tf.timezone_at(lng=longitude, lat=latitude)
+                if timezone_name is None:
+                    timezone_name = tf.closest_timezone_at(lng=longitude,
+                                                           lat=latitude,
+                                                           delta_degree=5)
+                # Express the station's current UTC offset as a whole number of hours
+                cet = pytz.timezone(timezone_name)
+                offset = cet.utcoffset(datetime.now(), is_dst=True)
+                shift = int(offset / timedelta(hours=1))
+                # Parameterized query: no need to hand-escape quotes in station names
+                request = """INSERT INTO "METEO_STATION" ("METST_NAME", "METST_IDNAME", "METST_LOCATION", "METST_TIMEZONE")
+                             VALUES (%s, %s, point(%s, %s), %s);"""
+                db.cursor.execute(request, (row['Nom'], row['ID'], latitude, longitude, shift))
+
+
+    def __insert_features(self):
+        logger.info('Inserting MeteoFrance list of features from meteofrance_features.csv')
+        csv_file = Path.cwd() / 'config' / 'features' / 'meteofrance' / 'meteofrance_features.csv'
+        with PostgreSQLDBConnection.Instance() as db:
+            with open(csv_file, "r") as f:
+                # DictReader already consumes the header line: an extra next()
+                # call here would silently skip the first feature (temperature)
+                reader = DictReader(f, delimiter=',')
+                for row in reader:
+                    request = """INSERT INTO "METEO_FEATURE" ("METFT_NAME", "PARAM_ID_PARAMETER")
+                                 VALUES (%s, %s);"""
+                    db.cursor.execute(request, (row['METFT_NAME'], row['PARAM_ID_PARAMETER']))
+
+
+
+ def __collect_historical_data(self):
+        '''
+        Collect all csv files from January 1996 until the month
+        before now. The date argument in the download url is of the
+        form 201001 for January 2010. We start by computing all these
+        patterns, stored in the historical list.
+        '''
+ # List of year-months to consider
+ historical = []
+ date_end = datetime.now()
+ for year in range(1996, date_end.year+1):
+ for month in range(1,13):
+ date = datetime(year, month, 1)
+ if date <= date_end:
+ historical.append(date.strftime("%Y%m"))
+
+ # We download all csv files from meteofrance that are not in
+ # the data repository
+        meteo_data = self.__data_directory / 'historical'
+        meteo_data.mkdir(exist_ok=True, parents=True)
+ for date in historical:
+ if not isfile(meteo_data / ('synop.'+date+'.csv')):
+ link = 'https://donneespubliques.meteofrance.fr/donnees_libres/Txt/Synop/Archive/synop.'
+ link += date + '.csv.gz'
+ download_path = meteo_data / basename(link)
+ urlretrieve(link, download_path)
+ with gzip.open(download_path, 'rb') as f:
+ csv_file = meteo_data / basename(link[:-3])
+ with open(csv_file, 'w') as g:
+ g.write(f.read().decode())
+ remove(meteo_data / basename(link))
+
+
+    def __from_date_to_datetz(self, date, station_idname, station_id):
+        '''
+        Convert a raw MeteoFrance date string (YYYYMMDDHH...) into a
+        timestamp carrying the UTC offset of the given station.
+        station_id is kept for call-site compatibility but is unused.
+        '''
+        # Build the station -> UTC-offset cache on first call. A single
+        # underscore is used on purpose: a double underscore would be
+        # name-mangled and the hasattr() test would always fail.
+        if not hasattr(self, '_meteo_station_tz'):
+            self._meteo_station_tz = {}
+            tf = TimezoneFinder(in_memory=True)
+            with PostgreSQLDBConnection.Instance() as db:
+                db.cursor.execute('select "METST_IDNAME", "METST_LOCATION" from "METEO_STATION";')
+                list_of_rows = db.cursor.fetchall()
+            for idname, location in list_of_rows:
+                # METST_LOCATION is stored as point(latitude, longitude)
+                latitude, longitude = map(float, location.strip('()').split(','))
+                timezone_name = tf.timezone_at(lng=longitude, lat=latitude)
+                if timezone_name is None:
+                    timezone_name = tf.closest_timezone_at(lng=longitude,
+                                                           lat=latitude,
+                                                           delta_degree=13,
+                                                           exact_computation=True,
+                                                           force_evaluation=True)
+                cet = pytz.timezone(timezone_name)
+                offset = cet.utcoffset(datetime.now(), is_dst=True)
+                self._meteo_station_tz[idname] = int(offset / timedelta(hours=1))
+        # Note: this applies the station's *current* offset to historical dates
+        shift = self._meteo_station_tz[station_idname]
+        return f"{date[:4]}-{date[4:6]}-{date[6:8]} {date[8:10]}:00:00{shift:+03d}"
+
+
+ def __insert_historical_data(self):
+ csv_file = Path.cwd() / 'config' / 'features' / 'meteofrance' / 'meteofrance_features.csv'
+ with PostgreSQLDBConnection.Instance() as db:
+ db.cursor.execute('SELECT * from "METEO_FEATURE";')
+ list_of_rows = db.cursor.fetchall()
+ dico = {u[1]:u[0] for u in list_of_rows}
+
+        with open(csv_file, "r") as f:
+            # DictReader already consumes the header line: an extra next()
+            # call would skip the first feature
+            reader = DictReader(f, delimiter=',')
+            dico_features = {row["abbreviation"]: dico[row["METFT_NAME"]] for row in reader}
+
+ with PostgreSQLDBConnection.Instance() as db:
+ db.cursor.execute('SELECT * from "METEO_STATION";')
+ list_of_rows = db.cursor.fetchall()
+ dico_station = {u[2]:u[0] for u in list_of_rows}
+
+        for feature in dico_features:
+            logger.info(f'Integrating {[u for u in dico if dico[u]==dico_features[feature]][0]} feature')
+            for station in dico_station:
+                logger.info(f' - Dealing with meteo station n°: {station}')
+                # Stage the rows for this (feature, station) pair in a temporary
+                # csv file, then bulk-load it with COPY
+                tmp_file = tempfile.NamedTemporaryFile('w')
+                dir_data = Path.cwd() / 'data' / 'meteo_france' / 'historical'
+                for csv_meteo in listdir(dir_data):
+                    with open(dir_data / csv_meteo, "r") as f:
+                        reader = DictReader(f, delimiter=';')
+                        for row in reader:
+                            if row['numer_sta'] != station:
+                                continue
+                            date_tz = self.__from_date_to_datetz(row['date'], station, dico_station[station])
+                            tmp_file.write(f"{row[feature]},'{date_tz}',{dico_features[feature]},{dico_station[station]}\n")
+                tmp_file.flush()
+                with open(tmp_file.name, 'r') as f:
+                    with PostgreSQLDBConnection.Instance() as db:
+                        db.cursor.copy_from(f, '"METEO_FEATURE_VALUE"', sep=',', null='mq',
+                                            columns=['"METVA_VALUE"', '"METVA_DATETIME"', '"METFT_ID_METEO_FEATURE"', '"METST_ID_METEO_STATION"'])
+
+
+ def __generate(self):
+ # Meteo stations must be collected first, if not in the database
+ with PostgreSQLDBConnection.Instance() as db:
+ db.cursor.execute('SELECT count(*) FROM "METEO_STATION";')
+ updated_meteo_station = db.cursor.fetchone()[0]
+ if not updated_meteo_station:
+ self.__collect_stations()
+
+        # Features from config/features/meteofrance/meteofrance_features.csv
+        # must be inserted in the database, if not already done
+ with PostgreSQLDBConnection.Instance() as db:
+ db.cursor.execute('SELECT count(*) FROM "METEO_FEATURE";')
+ updated_meteo_features = db.cursor.fetchone()[0]
+ if not updated_meteo_features:
+ self.__insert_features()
+
+ # Downloading meteofrance historical csv files
+ logger.info('Downloading historical csv files from MeteoFrance, if needed')
+ self.__collect_historical_data()
+
+ self.__insert_historical_data()
+
+ def update(self):
+ '''
+ Update the MeteoFrance features with the last available data
+ '''
+        # We collect archive files from MeteoFrance, up to the current month,
+        # using the same method as for data generation: this is currently
+        # based on the presence of a synop.<date>.csv file in the
+        # data/meteo_france/historical directory. The file corresponding to the
+        # current month is deleted first, so that its most recent version is
+        # downloaded again by calling self.__collect_historical_data
+        # TODO: updates according to a PostgreSQL request?
+        logger.info('Updating historical csv files from MeteoFrance, if needed')
+        today = datetime.now()
+        todel = 'synop.' + today.strftime("%Y%m") + ".csv"
+        current_month = self.__data_directory / 'historical' / todel
+        # Guard against a first run in a new month, when the file does not exist yet
+        if isfile(current_month):
+            remove(current_month)
+        self.__collect_historical_data()
+ logger.info('Inserting csv files in database')
+ self.__insert_historical_data()
+
--- /dev/null
+from pathlib import Path
+from shutil import rmtree
+from configparser import ConfigParser
+from subprocess import Popen, PIPE
+from sys import argv
+import logging
+from logging.config import fileConfig
+
+fileConfig((Path.cwd() / 'config') / 'logging.cfg')
+logger = logging.getLogger()
+
+argument = argv[-1]
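+# Expected command-line argument: 'data', 'db' or 'all' (see the Makefile targets)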
+
+if argument in ['data', 'all']:
+ logger.info("Cleaning and restoring data directory")
+ directory = Path.cwd() / 'data'
+ if directory.is_dir():
+ rmtree(directory)
+    directory.mkdir()
+
+# Cleaning the postgresql database
+if argument in ['db', 'all']:
+ config = ConfigParser()
+ config.read((Path.cwd() / 'config') / 'main.cfg')
+
+ host = config['postgresql']['host']
+ user = config['postgresql']['user']
+ port = config['postgresql']['port']
+ dbname = config['postgresql']['dbname']
+
+ logger.info("PostgreSQL database deletion")
+ command = ['dropdb', '-h', host, '-U', user, '-p', port, dbname]
+ process = Popen(command, stdout=PIPE, stderr=PIPE)
+    stdout, stderr = process.communicate()
+    if stderr:
+        logger.warning(stderr.decode().strip())
+
+ logger.info("PostgreSQL database creation")
+ command = ['createdb', '-h', host, '-U', user, '-p', port, dbname]
+ process = Popen(command, stdout=PIPE, stderr=PIPE)
+    stdout, stderr = process.communicate()
+    if stderr:
+        logger.warning(stderr.decode().strip())
--- /dev/null
+from pathlib import Path
+import psycopg2
+import configparser
+
+class Singleton:
+
+ def __init__(self, cls):
+ self._cls = cls
+
+ def Instance(self):
+ try:
+ return self._instance
+ except AttributeError:
+ self._instance = self._cls()
+ return self._instance
+
+ def __call__(self):
+ raise TypeError('Singletons must be accessed through `Instance()`.')
+
+ def __instancecheck__(self, inst):
+ return isinstance(inst, self._cls)
+
+@Singleton
+class PostgreSQLDBConnection(object):
+ """Postgresql database connection"""
+
+ def __init__(self, connection_string = ''):
+ if connection_string == '':
+            # Retrieve the database settings from config/main.cfg
+ config = configparser.ConfigParser()
+ config.read((Path.cwd() / 'config') / 'main.cfg')
+
+ host = config['postgresql']['host']
+ user = config['postgresql']['user']
+ port = config['postgresql']['port']
+ self.dbname = config['postgresql']['dbname']
+
+ self.connection_string = f"host={host} port={port} dbname={self.dbname} user={user}"
+
+ else:
+ self.connection_string = connection_string
+ self.dbname = ''
+
+
+ def __enter__(self):
+ self.connection = psycopg2.connect(self.connection_string)
+ self.connection.autocommit = True
+ self.cursor = self.connection.cursor()
+ return self
+
+ @property
+ def name(self):
+ return self.dbname
+
+ def __str__(self):
+ return 'Database connection object'
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ #self.connection.commit()
+ self.cursor.close()
+ self.connection.close()
+
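+# Typical usage (a minimal sketch; see extomeAI/source for real call sites):
+#     with PostgreSQLDBConnection.Instance() as db:
+#         db.cursor.execute('SELECT version();')
+#         print(db.cursor.fetchone())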
--- /dev/null
+from extomeAI.source import MeteoFrance
+
+from configparser import ConfigParser
+from logging.config import fileConfig
+from logging import getLogger
+from pathlib import Path
+from shutil import rmtree
+from subprocess import Popen, PIPE
+
+
+fileConfig((Path.cwd() / 'config') / 'logging.cfg')
+logger = getLogger()
+
+
+class ExtomeEngine:
+ def __init__(self, clean = False):
+ logger.info("Extome-IA engine launched")
+ if clean:
+ self.clean()
+ print("Ne pas oublier d'exporter la BDD dans pgModeler")
+ print("Ni de copier l'archive dans la data")
+
+ def clean(self):
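+        # Mirrors extomeAI/lib/cleaner.py called with the 'all' argument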
+ # Cleaning the data directory
+ logger.info("Cleaning and restoring data directory")
+ directory = Path.cwd() / 'data'
+ if directory.is_dir():
+ rmtree(directory)
+        directory.mkdir()
+
+ # Cleaning the postgresql database
+ config = ConfigParser()
+ config.read((Path.cwd() / 'config') / 'main.cfg')
+
+ host = config['postgresql']['host']
+ user = config['postgresql']['user']
+ port = config['postgresql']['port']
+ dbname = config['postgresql']['dbname']
+
+ logger.info("PostgreSQL database deletion")
+ command = ['dropdb', '-h', host, '-U', user, '-p', port, dbname]
+ process = Popen(command, stdout=PIPE, stderr=PIPE)
+ process.communicate()
+
+ logger.info("PostgreSQL database creation")
+ command = ['createdb', '-h', host, '-U', user, '-p', port, dbname]
+ process = Popen(command, stdout=PIPE, stderr=PIPE)
+ process.communicate()
+
+ def add_meteofrance(self):
+ self.meteofrance = MeteoFrance()
+
+
+
+engine = ExtomeEngine(clean = False)
+engine.add_meteofrance()
\ No newline at end of file
--- /dev/null
+numpy==1.18.1
+scipy==1.4.1
+xgboost==0.90
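+# Packages imported by the code but missing above; versions left unpinned
+celery
+psycopg2
+pytz
+timezonefinder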
--- /dev/null
+from __future__ import absolute_import
+from celery import Celery
+
+app = Celery('test_celery',
+ backend='amqp',
+ broker='amqp://guest@localhost//',
+ include=['test_celery.tasks'])
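+# A worker for this app can be started with, e.g.:
+#     celery -A test_celery worker --loglevel=info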
--- /dev/null
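+# Run from the project root with: python -m test_celery.run_tasks
+# (the command commented out in the Makefile's run target)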
+from .tasks import longtime_add
+import time
+
+if __name__ == '__main__':
+    result = longtime_add.delay(1, 2)
+    # at this time, our task is not finished, so it will return False
+    print('Task finished?', result.ready())
+    print('Task result:', result.result)
+    # sleep 10 seconds to ensure the task has been finished
+    time.sleep(10)
+    # now the task should be finished and ready method will return True
+    print('Task finished?', result.ready())
+    print('Task result:', result.result)
\ No newline at end of file
--- /dev/null
+from __future__ import absolute_import
+from test_celery.celery import app
+import time
+
+
+@app.task
+def longtime_add(x, y):
+    print('long time task begins')
+    # sleep 5 seconds
+    time.sleep(5)
+    print('long time task finished')
+    return x + y
\ No newline at end of file