AND Private Git Repository - predictops.git/blobdiff - main.py
AND — Algorithmique Numérique Distribuée (logo)

Private GIT Repository
Adding a source module to check for redundancy in feature names.
[predictops.git] / main.py
diff --git a/main.py b/main.py
index 3d3739fa46c6016c995f341d6140aec3bb58af49..9e35b2da9acf12c39b784454a8c420dae5ce2bb4 100644 (file)
--- a/main.py
+++ b/main.py
@@ -1,58 +1,41 @@
-from extomeAI.source import MeteoFrance
+from predictops.engine import Engine
+from predictops.learn.preprocessing import Preprocessing
+from predictops.target.all import All
+from predictops.target.toarea import ToArea
 
-from celery import Celery
-from configparser import ConfigParser
-from logging.config import fileConfig
 from logging import getLogger
+from logging.config import fileConfig
 from pathlib import Path
-from shutil import rmtree
-from subprocess import Popen, PIPE
 
+import geopandas as gpd
 
 fileConfig((Path.cwd() / 'config') / 'logging.cfg')
 logger = getLogger()
 
+if __name__ == '__main__':
+
+    config = (Path.cwd() / 'config') / 'learn.cfg'
+    engine = Engine(config_file = config)
+
+    engine.add_features()
+    #print(engine.X)
+    process = Preprocessing(config_file = config, dict_features = engine.X)
+
+
+    #print(engine.X[datetime.strptime('06/30/2010 21:00:00', '%m/%d/%Y %H:%M:%S')])
+    print(process.dataframe.head(n=20))
+    print(process.dataframe.tail(n=20))
+
+
+    target = All(stream_file = Path.cwd() / 'data' / 'targets' / 'sdis25' / 'interventions.csv')
+
+
+    exit()
+
+    depts = gpd.read_file( Path.cwd() / 'data' / 'targets' / 'departments' / "departements-20180101.shp")
+    Doubs = depts.loc[depts['nom'] == 'Doubs'].iloc[0]
+
+    ToArea(area=Doubs.geometry,
+           start = start, end = end,
+           csv_file = Path.cwd() / 'data' / 'targets' / 'sdis25' / 'interventions.csv')
 
-class ExtomeEngine:
-    def __init__(self, clean = False):
-        logger.info("Extome-IA engine launched")
-        if clean:
-            self.clean()
-            print("Ne pas oublier d'exporter la BDD dans pgModeler")
-            print("Ni de copier l'archive dans la data")
-    
-    def clean(self):
-        # Cleaning the data directory
-        logger.info("Cleaning and restoring data directory")
-        directory  = Path.cwd() / 'data'
-        if directory.is_dir():
-            rmtree(directory)
-        p = Path(Path.cwd() / 'data')
-        p.mkdir()
-    
-        # Cleaning the postgresql database
-        config = ConfigParser()
-        config.read((Path.cwd() / 'config') / 'main.cfg')
-        
-        host   = config['postgresql']['host']
-        user   = config['postgresql']['user']
-        port   = config['postgresql']['port']
-        dbname = config['postgresql']['dbname']
-        
-        logger.info("PostgreSQL database deletion")
-        command = ['dropdb', '-h', host, '-U', user, '-p', port, dbname]
-        process = Popen(command, stdout=PIPE, stderr=PIPE)
-        process.communicate()
-        
-        logger.info("PostgreSQL database creation")
-        command = ['createdb', '-h', host, '-U', user, '-p', port, dbname]
-        process = Popen(command, stdout=PIPE, stderr=PIPE)
-        process.communicate() 
-    
-    def add_meteofrance(self):
-        self.meteofrance = MeteoFrance()
-        
-        
-
-engine = ExtomeEngine(clean = False)
-engine.add_meteofrance()
\ No newline at end of file