Commit d475c625 by Nicolas Joyard

Merge branch 'feat.numeroDocument', closes !3

parents 9b3a8cab bc24b854
......@@ -5,3 +5,4 @@
data/*.json
data/*.zip
reference
......@@ -7,9 +7,9 @@ Data parlementaire.
The data comes from the parliamentary Open Data portals:
* [data.assemblee-nationale.fr](http://data.assemblee-nationale.fr/)
* [data.senat.fr](http://data.senat.fr/)
ParlAPI is designed to follow the data schema of the parliamentary sources as
faithfully as possible. In particular, no computed data is added to the model.
## Installation
......@@ -26,7 +26,7 @@ n'est ajoutée au modèle.
### Installation
```bash
$ git clone https://github.com/regardscitoyens/parlapi
$ git clone https://git.regardscitoyens.org/regardscitoyens/parlapi.git
$ cd parlapi
$ virtualenv ve
$ source ve/bin/activate
......@@ -39,10 +39,17 @@ $ psql -c "create database parlapi with owner parlapi;"
```bash
$ parlapi createdb
$ parlapi db upgrade
$ parlapi update_amo_an
$ parlapi runserver
```
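The other import jobs defined in `parlapi/cli.py` (dossiers, amendements, scrutins, réunions) are run the same way. A quick sketch, with options as declared on the corresponding click commands and an illustrative file path:

```bash
$ parlapi update_dossiers_an
$ parlapi update_scrutins_an --force --file data/scrutins.json
```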
### Updating an existing installation
```bash
$ parlapi db upgrade
```
### Openshift deployment
```bash
......@@ -60,5 +67,16 @@ $ git push --force openshift openshift:master
See the `parlapi` role in the [playbook citoyen][gh-playbook]
## Development
### Generating new migrations
After modifying the Python models:
```bash
$ parlapi db migrate
$ parlapi db upgrade
```
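Flask-Migrate's `db migrate` also accepts a revision message via `-m`/`--message`, which keeps the generated migration file names readable; for instance (message text illustrative):

```bash
$ parlapi db migrate -m "add notice_numero to documents"
```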
[gh-playbook]: https://github.com/regardscitoyens/playbook-citoyen
# A generic, single database configuration.

[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
import logging

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
from flask import current_app
config.set_main_option('sqlalchemy.url',
                       current_app.config.get('SQLALCHEMY_DATABASE_URI'))
target_metadata = current_app.extensions['migrate'].db.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(url=url)

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """

    # this callback is used to prevent an auto-migration from being generated
    # when there are no changes to the schema
    # reference: http://alembic.readthedocs.org/en/latest/cookbook.html
    def process_revision_directives(context, revision, directives):
        if getattr(config.cmd_opts, 'autogenerate', False):
            script = directives[0]
            if script.upgrade_ops.is_empty():
                directives[:] = []
                logger.info('No changes in schema detected.')

    engine = engine_from_config(config.get_section(config.config_ini_section),
                                prefix='sqlalchemy.',
                                poolclass=pool.NullPool)

    connection = engine.connect()
    context.configure(connection=connection,
                      target_metadata=target_metadata,
                      process_revision_directives=process_revision_directives,
                      **current_app.extensions['migrate'].configure_args)

    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        connection.close()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}
"""empty message
Revision ID: 55c1e64eaf9f
Revises:
Create Date: 2017-03-21 11:38:18.839895
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '55c1e64eaf9f'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('documents', sa.Column('notice_numero', sa.Unicode(), nullable=True))
conn = op.get_bind()
conn.execute("""
CREATE OR REPLACE FUNCTION public.documents_search_vector_update()
RETURNS trigger AS
$BODY$
BEGIN
NEW.search_vector =
(
(
(
(
(
(
to_tsvector('pg_catalog.french', regexp_replace(coalesce(NEW.titre, ''), '[-@.]', ' ', 'g'))
|| to_tsvector('pg_catalog.french', regexp_replace(coalesce(NEW.type_code, ''), '[-@.]', ' ', 'g'))
)
|| to_tsvector('pg_catalog.french', regexp_replace(coalesce(NEW.type_libelle, ''), '[-@.]', ' ', 'g'))
)
|| to_tsvector('pg_catalog.french', regexp_replace(coalesce(NEW.soustype_code, ''), '[-@.]', ' ', 'g'))
)
|| to_tsvector('pg_catalog.french', regexp_replace(coalesce(NEW.soustype_libelle, ''), '[-@.]', ' ', 'g'))
)
|| to_tsvector('pg_catalog.french', regexp_replace(coalesce(NEW.statut_adoption, ''), '[-@.]', ' ', 'g'))
)
|| to_tsvector('pg_catalog.french', regexp_replace(coalesce(NEW.denomination_structurelle, ''), '[-@.]', ' ', 'g'))
)
|| to_tsvector('pg_catalog.french', regexp_replace(coalesce(NEW.notice_numero, ''), '[-@.]', ' ', 'g'))
;
RETURN NEW;
END
$BODY$
LANGUAGE plpgsql VOLATILE
COST 100;
ALTER FUNCTION public.documents_search_vector_update()
OWNER TO parlapi;
""")
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
conn.execute("""
CREATE OR REPLACE FUNCTION public.documents_search_vector_update()
RETURNS trigger AS
$BODY$
BEGIN
NEW.search_vector =
(
(
(
(
(
to_tsvector('pg_catalog.french', regexp_replace(coalesce(NEW.titre, ''), '[-@.]', ' ', 'g'))
|| to_tsvector('pg_catalog.french', regexp_replace(coalesce(NEW.type_code, ''), '[-@.]', ' ', 'g'))
)
|| to_tsvector('pg_catalog.french', regexp_replace(coalesce(NEW.type_libelle, ''), '[-@.]', ' ', 'g'))
)
|| to_tsvector('pg_catalog.french', regexp_replace(coalesce(NEW.soustype_code, ''), '[-@.]', ' ', 'g'))
)
|| to_tsvector('pg_catalog.french', regexp_replace(coalesce(NEW.soustype_libelle, ''), '[-@.]', ' ', 'g'))
)
|| to_tsvector('pg_catalog.french', regexp_replace(coalesce(NEW.statut_adoption, ''), '[-@.]', ' ', 'g'))
)
|| to_tsvector('pg_catalog.french', regexp_replace(coalesce(NEW.denomination_structurelle, ''), '[-@.]', ' ', 'g'))
;
RETURN NEW;
END
$BODY$
LANGUAGE plpgsql VOLATILE
COST 100;
ALTER FUNCTION public.documents_search_vector_update()
OWNER TO parlapi;
""")
op.drop_column('documents', 'notice_numero')
# ### end Alembic commands ###
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import click
from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager
@click.group()
def cli():
pass
from .parlapi import app
from .models import db
@cli.command(short_help=u'Exécute le serveur web flask intégré')
manager = Manager(app)
#
# Run server
#
@manager.command
def runserver():
from .parlapi import app
u"Exécute le serveur web flask intégré"
app.run()
@cli.command(short_help=u'Crée le schéma BDD')
#
# Manage database
#
@manager.command
def createdb():
from .parlapi import app
from .models import db
u'Crée le schéma BDD'
with app.app_context():
db.configure_mappers()
db.create_all()
db.configure_mappers()
db.create_all()
@cli.command(short_help=u'Supprime le schéma BDD')
def dropdb():
from .parlapi import app
from .models import db
@manager.command
@manager.option('-o', '--output', dest='output')
def erdiagram(output):
from eralchemy import render_er
with app.app_context():
db.drop_all()
render_er(db.Model, output)
@cli.command(short_help=u'Génère un diagramme ER de la BDD')
@click.option('--output')
def erdiagram(output):
from eralchemy import render_er
from .parlapi import app
from .models import db
migrate = Migrate(app, db)
manager.add_command('db', MigrateCommand)
with app.app_context():
render_er(db.Model, output)
#
# Run import jobs
#
@cli.command(short_help=u'Met à jour acteurs, mandats, organes depuis l\'AN')
@click.option('--force', is_flag=True)
@click.option('--file', default=None)
def update_amo_an(force, file):
from .parlapi import app
@manager.command
@manager.option('-f', '--file', dest='file', default=None)
def update_amo_an(file=None):
u"Met à jour acteurs, mandats, organes depuis l'AN"
from .jobs.an_amo import run
with app.app_context():
app.config.update(SQLALCHEMY_ECHO=False)
run(app, force, file)
app.config.update(SQLALCHEMY_ECHO=False)
run(app, False, file)
@cli.command(short_help=u'Met à jour dossiers, documents, actes depuis l\'AN')
@click.option('--force', is_flag=True)
@click.option('--file', default=None)
def update_dossiers_an(force, file):
from .parlapi import app
@manager.command
@manager.option('-f', '--file', dest='file', default=None)
def update_dossiers_an(file=None):
u"Met à jour dossiers, documents, actes depuis l'AN"
from .jobs.an_dossiers import run
with app.app_context():
app.config.update(SQLALCHEMY_ECHO=False)
run(app, force, file)
app.config.update(SQLALCHEMY_ECHO=False)
run(app, False, file)
@cli.command(short_help=u'Met à jour amendements depuis l\'AN')
@click.option('--force', is_flag=True)
@click.option('--file', default=None)
def update_amendements_an(force, file):
from .parlapi import app
@manager.command
@manager.option('-f', '--file', dest='file', default=None)
def update_amendements_an(file=None):
u"Met à jour amendements depuis l'AN"
from .jobs.an_amendements import run
with app.app_context():
app.config.update(SQLALCHEMY_ECHO=False)
run(app, force, file)
app.config.update(SQLALCHEMY_ECHO=False)
run(app, False, file)
@cli.command(short_help=u'Met à jour scrutins depuis l\'AN')
@click.option('--force', is_flag=True)
@click.option('--file', default=None)
def update_scrutins_an(force, file):
from .parlapi import app
@manager.command
@manager.option('-f', '--file', dest='file', default=None)
def update_scrutins_an(file=None):
u"Met à jour scrutins depuis l'AN"
from .jobs.an_scrutins import run
with app.app_context():
app.config.update(SQLALCHEMY_ECHO=False)
run(app, force, file)
app.config.update(SQLALCHEMY_ECHO=False)
run(app, False, file)
@cli.command(short_help=u'Met à jour réunions depuis l\'AN')
@click.option('--force', is_flag=True)
@click.option('--file', default=None)
def update_reunions_an(force, file):
from .parlapi import app
@manager.command
@manager.option('-f', '--file', dest='file', default=None)
def update_reunions_an(file=None):
u"Met à jour réunions depuis l'AN"
from .jobs.an_reunions import run
with app.app_context():
app.config.update(SQLALCHEMY_ECHO=False)
run(app, force, file)
app.config.update(SQLALCHEMY_ECHO=False)
run(app, False, file)
if __name__ == '__main__':
cli()
manager.run()
......@@ -41,6 +41,7 @@ class ImportDossiersJob(BaseANJob):
chrono = json['cycleDeVie']['chrono']
klass = json['classification']
notice = json['notice']
data = {
'acteurs': [],
......@@ -52,6 +53,7 @@ class ImportDossiersJob(BaseANJob):
'divisions': [],
'dossier': None,
'legislature': None,
'notice_numero': notice['numNotice'],
'organes': [],
'soustype_code': None,
'soustype_libelle': None,
......
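For orientation only (the AN export layout is not part of this diff): the keys read by `ImportDossiersJob` above suggest a document entry shaped roughly like the hedged sketch below, with an illustrative value for `numNotice`:

```python
# Hypothetical fragment of an AN document record; only the keys referenced
# above (cycleDeVie.chrono, classification, notice.numNotice) are shown.
document_json = {
    'cycleDeVie': {'chrono': {}},
    'classification': {},
    'notice': {'numNotice': '4214'},  # -> data['notice_numero']
}
```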
......@@ -246,6 +246,7 @@ class Document(db.Model):
date_publication = db.Column(db.Date)
date_publication_web = db.Column(db.Date)
notice_numero = db.Column(db.Unicode)
titre = db.Column(db.Unicode)
denomination_structurelle = db.Column(db.Unicode)
type_code = db.Column(db.Unicode)
......@@ -275,8 +276,9 @@ class Document(db.Model):
dossier_id = db.Column(db.Unicode, db.ForeignKey('dossiers.id'))
dossier = db.relationship('Dossier', back_populates='documents')
search_vector = db.Column(TSVectorType('titre', 'type_code',
'type_libelle', 'soustype_code',
search_vector = db.Column(TSVectorType('titre', 'notice_numero',
'type_code', 'type_libelle',
'soustype_code',
'soustype_libelle',
'statut_adoption',
'denomination_structurelle'))
......
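Two notes on this change (not part of the diff): the migration shown earlier redefines `public.documents_search_vector_update()` so the database trigger stays in sync with this column list, and the vector can be queried through sqlalchemy-searchable's `search()` helper. A minimal sketch, assuming an application context and the library's usual setup; the search term is arbitrary:

```python
from sqlalchemy_searchable import search

from parlapi.models import db, Document
from parlapi.parlapi import app

with app.app_context():
    # Filters on Document.search_vector, which now also covers notice_numero.
    query = search(db.session.query(Document), '4214')
    for doc in query.limit(5):
        print(doc.notice_numero, doc.titre)
```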
......@@ -4,6 +4,7 @@ import os
from flask import Flask
from flaskext.markdown import Markdown
from flask_migrate import Migrate
def setup_app(name):
......@@ -19,6 +20,8 @@ def setup_app(name):
from .models import db
db.init_app(app)
migrate = Migrate(app, db)
# Setup REST API
from .rest.setup import setup_an_api as setup_an_rest_api
an_rest_api = setup_an_rest_api(app)
......
......@@ -27,6 +27,7 @@ setup(
'flask-graphql>=1.3,<2',
'flask-markdown>=0.3,<0.4',
'flask-marshmallow>=0.7,<0.8',
'flask-script>=2.0,<3',
'flask-sqlalchemy>=2.1,<3',
'graphene_sqlalchemy>=1.0,<2',
'html5lib>=0.9999999,<1',
......@@ -37,6 +38,7 @@ setup(
'pycparser==2.13', # 2.14 has CFFI bug
'requests>=2.10,<3',
'sqlalchemy-searchable>=0.10,<1',
'Flask-Migrate>=2.0,<3',
],
classifiers=[
"Development Status :: 3 - Alpha",
......@@ -46,6 +48,6 @@ setup(
],
entry_points='''
[console_scripts]
parlapi=parlapi.cli:cli
parlapi=parlapi.cli:manager.run
'''
)
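Since the console script now points at the click group (`parlapi.cli:cli`) rather than `manager.run`, the installed `parlapi` command exposes click's generated help; for example:

```bash
$ parlapi --help
$ parlapi update_amo_an --help
```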