Remove unrelated scripts (#18219)

* Remove influxDB scripts

* Remove ancient db migrator

* Update requirements
This commit is contained in:
Paulus Schoutsen 2018-11-05 16:14:34 +01:00 committed by GitHub
parent f9f53fd278
commit 81fa74e5ca
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 0 additions and 667 deletions

View File

@@ -1,191 +0,0 @@
"""Script to convert an old-format home-assistant.db to a new format one."""
import argparse
import os.path
import sqlite3
import sys
from datetime import datetime
from typing import Optional, List
import homeassistant.config as config_util
import homeassistant.util.dt as dt_util
# pylint: disable=unused-import
from homeassistant.components.recorder import REQUIREMENTS # NOQA
def ts_to_dt(timestamp: Optional[float]) -> Optional[datetime]:
    """Convert a Unix timestamp from the legacy DB to a UTC datetime.

    Returns None when the stored value is NULL so that optional columns
    (e.g. a recorder run's ``end`` time) survive the migration unchanged.

    Note: the previous docstring described the opposite conversion
    ("turn a datetime into an integer"); the code has always mapped
    timestamp -> datetime via dt_util.utc_from_timestamp.
    """
    if timestamp is None:
        return None
    return dt_util.utc_from_timestamp(timestamp)
# Based on code at
# http://stackoverflow.com/questions/3173320/text-progress-bar-in-the-console
def print_progress(iteration: int, total: int, prefix: str = '',
                   suffix: str = '', decimals: int = 2,
                   bar_length: int = 68) -> None:
    """Print progress bar.

    Call in a loop to create terminal progress bar
    @params:
        iteration   - Required  : current iteration (Int)
        total       - Required  : total iterations (Int)
        prefix      - Optional  : prefix string (Str)
        suffix      - Optional  : suffix string (Str)
        decimals    - Optional  : number of decimals in percent complete (Int)
        bar_length  - Optional  : character length of bar (Int)
    """
    if total <= 0:
        # Nothing to report for an empty data set; the formulas below
        # divide by `total` and previously raised ZeroDivisionError.
        return
    filled_length = int(round(bar_length * iteration / float(total)))
    percents = round(100.00 * (iteration / float(total)), decimals)
    line = '#' * filled_length + '-' * (bar_length - filled_length)
    # Trailing carriage return (no newline) rewrites the same terminal line.
    sys.stdout.write('%s [%s] %s%s %s\r' % (prefix, line,
                                            percents, '%', suffix))
    sys.stdout.flush()
    if iteration == total:
        # Finished: move the cursor off the progress line.
        print("\n")
def run(script_args: List) -> int:
    """Run the actual script.

    Migrates recorder_runs, events and states from the legacy sqlite
    file (home-assistant.db) into a SQLAlchemy-managed database,
    committing every 1000 rows.  Returns 0 on success, 1 on a fatal
    configuration/database error.  `script_args` is unused; argparse
    reads sys.argv directly.
    """
    # pylint: disable=invalid-name
    # Imported lazily so sqlalchemy only needs to be installed when the
    # script is actually executed.
    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker
    from homeassistant.components.recorder import models

    parser = argparse.ArgumentParser(
        description="Migrate legacy DB to SQLAlchemy format.")
    parser.add_argument(
        '-c', '--config',
        metavar='path_to_config_dir',
        default=config_util.get_default_config_dir(),
        help="Directory that contains the Home Assistant configuration")
    parser.add_argument(
        '-a', '--append',
        action='store_true',
        default=False,
        help="Append to existing new format SQLite database")
    parser.add_argument(
        '--uri',
        type=str,
        help="Connect to URI and import (implies --append)"
             "eg: mysql://localhost/homeassistant")
    parser.add_argument(
        '--script',
        choices=['db_migrator'])
    args = parser.parse_args()

    config_dir = os.path.join(os.getcwd(), args.config)  # type: str

    # Test if configuration directory exists
    if not os.path.isdir(config_dir):
        if config_dir != config_util.get_default_config_dir():
            print(('Fatal Error: Specified configuration directory does '
                   'not exist {} ').format(config_dir))
            return 1

    src_db = '{}/home-assistant.db'.format(config_dir)
    dst_db = '{}/home-assistant_v2.db'.format(config_dir)

    if not os.path.exists(src_db):
        print("Fatal Error: Old format database '{}' does not exist".format(
            src_db))
        return 1
    if not args.uri and (os.path.exists(dst_db) and not args.append):
        print("Fatal Error: New format database '{}' exists already - "
              "Remove it or use --append".format(dst_db))
        print("Note: --append must maintain an ID mapping and is much slower"
              "and requires sufficient memory to track all event IDs")
        return 1

    conn = sqlite3.connect(src_db)
    uri = args.uri or "sqlite:///{}".format(dst_db)

    engine = create_engine(uri, echo=False)
    models.Base.metadata.create_all(engine)
    session_factory = sessionmaker(bind=engine)
    session = session_factory()

    # --uri implies append mode: event IDs must be remapped because the
    # destination database may already contain rows.
    append = args.append or args.uri

    c = conn.cursor()
    c.execute("SELECT count(*) FROM recorder_runs")
    num_rows = c.fetchone()[0]
    print("Converting {} recorder_runs".format(num_rows))
    c.close()

    c = conn.cursor()
    n = 0
    for row in c.execute("SELECT * FROM recorder_runs"):  # type: ignore
        n += 1
        session.add(models.RecorderRuns(
            start=ts_to_dt(row[1]),
            end=ts_to_dt(row[2]),
            closed_incorrect=row[3],
            created=ts_to_dt(row[4])
        ))
        # Commit in batches of 1000 to bound the session's memory use.
        if n % 1000 == 0:
            session.commit()
            print_progress(n, num_rows)
    print_progress(n, num_rows)
    session.commit()
    c.close()

    c = conn.cursor()
    c.execute("SELECT count(*) FROM events")
    num_rows = c.fetchone()[0]
    print("Converting {} events".format(num_rows))
    c.close()

    # Maps old event id -> new autoincrement event id (append mode only).
    id_mapping = {}

    c = conn.cursor()
    n = 0
    for row in c.execute("SELECT * FROM events"):  # type: ignore
        n += 1
        o = models.Events(
            event_type=row[1],
            event_data=row[2],
            origin=row[3],
            created=ts_to_dt(row[4]),
            time_fired=ts_to_dt(row[5]),
        )
        session.add(o)
        if append:
            # Flush so the database assigns the new event_id now,
            # making it available for the mapping below.
            session.flush()
            id_mapping[row[0]] = o.event_id
        if n % 1000 == 0:
            session.commit()
            print_progress(n, num_rows)
    print_progress(n, num_rows)
    session.commit()
    c.close()

    c = conn.cursor()
    c.execute("SELECT count(*) FROM states")
    num_rows = c.fetchone()[0]
    print("Converting {} states".format(num_rows))
    c.close()

    c = conn.cursor()
    n = 0
    for row in c.execute("SELECT * FROM states"):  # type: ignore
        n += 1
        session.add(models.States(
            entity_id=row[1],
            state=row[2],
            attributes=row[3],
            last_changed=ts_to_dt(row[4]),
            last_updated=ts_to_dt(row[5]),
            # In append mode remap the foreign key via id_mapping;
            # otherwise the original id falls through unchanged.
            event_id=id_mapping.get(row[6], row[6]),
            domain=row[7]
        ))
        if n % 1000 == 0:
            session.commit()
            print_progress(n, num_rows)
    print_progress(n, num_rows)
    session.commit()
    c.close()

    return 0

View File

@@ -1,281 +0,0 @@
"""Script to import recorded data into an Influx database."""
import argparse
import json
import os
import sys
from typing import List
import homeassistant.config as config_util
def run(script_args: List) -> int:
    """Run the actual script.

    Reads 'state_changed' events from the recorder database in batches
    of --step events and writes them to an InfluxDB database as points.
    With --simulate, points are only counted, never written.  Returns 0
    on success, 1 on a fatal configuration/database error.
    `script_args` is unused; argparse reads sys.argv directly.
    """
    # Imported lazily so sqlalchemy/influxdb only need to be installed
    # when the script is actually executed.
    from collections import defaultdict
    from sqlalchemy import create_engine
    from sqlalchemy import func
    from sqlalchemy.orm import sessionmaker
    from influxdb import InfluxDBClient
    from homeassistant.components.recorder import models
    from homeassistant.helpers import state as state_helper
    from homeassistant.core import State
    from homeassistant.core import HomeAssistantError

    parser = argparse.ArgumentParser(
        description="import data to influxDB.")
    parser.add_argument(
        '-c', '--config',
        metavar='path_to_config_dir',
        default=config_util.get_default_config_dir(),
        help="Directory that contains the Home Assistant configuration")
    parser.add_argument(
        '--uri',
        type=str,
        help="Connect to URI and import (if other than default sqlite) "
             "eg: mysql://localhost/homeassistant")
    parser.add_argument(
        '-d', '--dbname',
        metavar='dbname',
        required=True,
        help="InfluxDB database name")
    parser.add_argument(
        '-H', '--host',
        metavar='host',
        default='127.0.0.1',
        help="InfluxDB host address")
    parser.add_argument(
        '-P', '--port',
        metavar='port',
        default=8086,
        help="InfluxDB host port")
    parser.add_argument(
        '-u', '--username',
        metavar='username',
        default='root',
        help="InfluxDB username")
    parser.add_argument(
        '-p', '--password',
        metavar='password',
        default='root',
        help="InfluxDB password")
    parser.add_argument(
        '-s', '--step',
        metavar='step',
        default=1000,
        help="How many points to import at the same time")
    parser.add_argument(
        '-t', '--tags',
        metavar='tags',
        default="",
        help="Comma separated list of tags (key:value) for all points")
    parser.add_argument(
        '-D', '--default-measurement',
        metavar='default_measurement',
        default="",
        help="Store all your points in the same measurement")
    parser.add_argument(
        '-o', '--override-measurement',
        metavar='override_measurement',
        default="",
        help="Store all your points in the same measurement")
    parser.add_argument(
        '-e', '--exclude_entities',
        metavar='exclude_entities',
        default="",
        help="Comma separated list of excluded entities")
    parser.add_argument(
        '-E', '--exclude_domains',
        metavar='exclude_domains',
        default="",
        help="Comma separated list of excluded domains")
    parser.add_argument(
        "-S", "--simulate",
        default=False,
        action="store_true",
        help=("Do not write points but simulate preprocessing and print "
              "statistics"))
    parser.add_argument(
        '--script',
        choices=['influxdb_import'])

    args = parser.parse_args()
    simulate = args.simulate

    # client stays None while simulating; guarded before every write.
    client = None
    if not simulate:
        client = InfluxDBClient(
            args.host, args.port, args.username, args.password)
        client.switch_database(args.dbname)

    config_dir = os.path.join(os.getcwd(), args.config)  # type: str

    # Test if configuration directory exists
    if not os.path.isdir(config_dir):
        if config_dir != config_util.get_default_config_dir():
            print(('Fatal Error: Specified configuration directory does '
                   'not exist {} ').format(config_dir))
            return 1

    src_db = '{}/home-assistant_v2.db'.format(config_dir)

    if not os.path.exists(src_db) and not args.uri:
        print("Fatal Error: Database '{}' does not exist "
              "and no URI given".format(src_db))
        return 1

    uri = args.uri or 'sqlite:///{}'.format(src_db)
    engine = create_engine(uri, echo=False)
    session_factory = sessionmaker(bind=engine)
    session = session_factory()

    # --step arrives as a string from the CLI; convert once.
    step = int(args.step)
    step_start = 0

    tags = {}
    if args.tags:
        tags.update(dict(elem.split(':') for elem in args.tags.split(',')))
    excl_entities = args.exclude_entities.split(',')
    excl_domains = args.exclude_domains.split(',')
    override_measurement = args.override_measurement
    default_measurement = args.default_measurement

    # pylint: disable=assignment-from-no-return
    query = session.query(func.count(models.Events.event_type)).filter(
        models.Events.event_type == 'state_changed')
    total_events = query.scalar()
    prefix_format = '{} of {}'

    points = []
    invalid_points = []
    count = 0
    # Per-entity point counter for the final statistics report.
    entities = defaultdict(int)
    print_progress(0, total_events, prefix_format.format(0, total_events))

    while True:
        step_stop = step_start + step
        if step_start > total_events:
            print_progress(total_events, total_events, prefix_format.format(
                total_events, total_events))
            break
        query = session.query(models.Events).filter(
            models.Events.event_type == 'state_changed').order_by(
                models.Events.time_fired).slice(step_start, step_stop)

        for event in query:
            event_data = json.loads(event.event_data)

            # Skip events without an entity_id or matching an exclusion.
            if not ('entity_id' in event_data) or (
                    excl_entities and event_data[
                        'entity_id'] in excl_entities) or (
                            excl_domains and event_data[
                                'entity_id'].split('.')[0] in excl_domains):
                session.expunge(event)
                continue

            try:
                state = State.from_dict(event_data.get('new_state'))
            except HomeAssistantError:
                invalid_points.append(event_data)
                # BUG FIX: the original fell through here, so the
                # `if not state` check below read a stale `state` from a
                # previous iteration (or raised NameError on the first
                # event).  Skip the invalid event instead.
                continue

            if not state:
                invalid_points.append(event_data)
                continue

            # Numeric states go into the 'value' field; everything else
            # is stored as a string under 'state'.
            try:
                _state = float(state_helper.state_as_number(state))
                _state_key = 'value'
            except ValueError:
                _state = state.state
                _state_key = 'state'

            if override_measurement:
                measurement = override_measurement
            else:
                measurement = state.attributes.get('unit_of_measurement')
                if measurement in (None, ''):
                    if default_measurement:
                        measurement = default_measurement
                    else:
                        measurement = state.entity_id

            point = {
                'measurement': measurement,
                'tags': {
                    'domain': state.domain,
                    'entity_id': state.object_id,
                },
                'time': event.time_fired,
                'fields': {
                    _state_key: _state,
                }
            }

            for key, value in state.attributes.items():
                if key != 'unit_of_measurement':
                    # If the key is already in fields
                    if key in point['fields']:
                        key = key + '_'
                    # Prevent column data errors in influxDB.
                    # For each value we try to cast it as float
                    # But if we can not do it we store the value
                    # as string add "_str" postfix to the field key
                    try:
                        point['fields'][key] = float(value)
                    except (ValueError, TypeError):
                        new_key = '{}_str'.format(key)
                        point['fields'][new_key] = str(value)

            entities[state.entity_id] += 1
            point['tags'].update(tags)
            points.append(point)
            # Release the ORM object so the session does not grow.
            session.expunge(event)

        if points:
            if not simulate:
                client.write_points(points)
            count += len(points)
            # This prevents the progress bar from going over 100% when
            # the last step happens
            print_progress((step_start + len(
                points)), total_events, prefix_format.format(
                    step_start, total_events))
        else:
            print_progress(
                (step_start + step), total_events, prefix_format.format(
                    step_start, total_events))

        points = []
        step_start += step

    print("\nStatistics:")
    print("\n".join(["{:6}: {}".format(v, k) for k, v
                     in sorted(entities.items(), key=lambda x: x[1])]))
    print("\nInvalid Points: {}".format(len(invalid_points)))
    print("\nImport finished: {} points written".format(count))
    return 0
# Based on code at
# http://stackoverflow.com/questions/3173320/text-progress-bar-in-the-console
def print_progress(iteration: int, total: int, prefix: str = '',
                   suffix: str = '', decimals: int = 2,
                   bar_length: int = 68) -> None:
    """Render a one-line terminal progress bar.

    Invoke once per loop pass; the trailing carriage return keeps the
    bar on a single line until completion, when a blank line is emitted.

    @params:
        iteration   - Required  : current iteration (Int)
        total       - Required  : total iterations (Int)
        prefix      - Optional  : prefix string (Str)
        suffix      - Optional  : suffix string (Str)
        decimals    - Optional  : number of decimals in percent complete (Int)
        bar_length  - Optional  : character length of bar (Int)
    """
    done = int(round(bar_length * iteration / float(total)))
    pct = round(100.00 * (iteration / float(total)), decimals)
    bar = ''.join(['#' * done, '-' * (bar_length - done)])
    out = '%s [%s] %s%s %s\r' % (prefix, bar, pct, '%', suffix)
    stream = sys.stdout
    stream.write(out)
    stream.flush()
    if iteration == total:
        print('\n')

View File

@@ -1,193 +0,0 @@
"""Script to convert an old-structure influxdb to a new one."""
import argparse
import sys
from typing import List
# Based on code at
# http://stackoverflow.com/questions/3173320/text-progress-bar-in-the-console
def print_progress(iteration: int, total: int, prefix: str = '',
                   suffix: str = '', decimals: int = 2,
                   bar_length: int = 68) -> None:
    """Print progress bar.

    Call in a loop to create terminal progress bar
    @params:
        iteration   - Required  : current iteration (Int)
        total       - Required  : total iterations (Int)
        prefix      - Optional  : prefix string (Str)
        suffix      - Optional  : suffix string (Str)
        decimals    - Optional  : number of decimals in percent complete (Int)
        bar_length  - Optional  : character length of bar (Int)
    """
    # Number of bar characters to draw as completed.
    filled_length = int(round(bar_length * iteration / float(total)))
    # NOTE(review): divides by `total` — a zero total raises
    # ZeroDivisionError.
    percents = round(100.00 * (iteration / float(total)), decimals)
    line = '#' * filled_length + '-' * (bar_length - filled_length)
    # Carriage return (no newline) rewrites the same terminal line.
    sys.stdout.write('%s [%s] %s%s %s\r' % (prefix, line,
                                            percents, '%', suffix))
    sys.stdout.flush()
    if iteration == total:
        # Finished: move the cursor off the progress line.
        print("\n")
def run(script_args: List) -> int:
    """Run the actual script.

    Clones every measurement of the source database into a
    "<dbname>__old" database, recreates the source database, then copies
    the points back with fields normalised to floats (falling back to a
    "state"/"*_str" string field).  With --delete the intermediate old
    database is dropped afterwards.  Returns 0 on success.
    `script_args` is unused; argparse reads sys.argv directly.
    """
    # Imported lazily so influxdb only needs to be installed when the
    # script is actually executed.
    from influxdb import InfluxDBClient

    parser = argparse.ArgumentParser(
        description="Migrate legacy influxDB.")
    parser.add_argument(
        '-d', '--dbname',
        metavar='dbname',
        required=True,
        help="InfluxDB database name")
    parser.add_argument(
        '-H', '--host',
        metavar='host',
        default='127.0.0.1',
        help="InfluxDB host address")
    parser.add_argument(
        '-P', '--port',
        metavar='port',
        default=8086,
        help="InfluxDB host port")
    parser.add_argument(
        '-u', '--username',
        metavar='username',
        default='root',
        help="InfluxDB username")
    parser.add_argument(
        '-p', '--password',
        metavar='password',
        default='root',
        help="InfluxDB password")
    parser.add_argument(
        '-s', '--step',
        metavar='step',
        default=1000,
        help="How many points to migrate at the same time")
    parser.add_argument(
        '-o', '--override-measurement',
        metavar='override_measurement',
        default="",
        help="Store all your points in the same measurement")
    parser.add_argument(
        '-D', '--delete',
        action='store_true',
        default=False,
        help="Delete old database")
    parser.add_argument(
        '--script',
        choices=['influxdb_migrator'])

    args = parser.parse_args()

    # BUG FIX: argparse delivers a CLI-passed --step as a string, so the
    # original `offset += args.step` raised TypeError whenever -s was
    # used.  Convert once here (matching the influxdb_import script).
    step = int(args.step)

    # Get client for old DB
    client = InfluxDBClient(args.host, args.port,
                            args.username, args.password)
    client.switch_database(args.dbname)
    # Get DB list
    db_list = [db['name'] for db in client.get_list_database()]
    # Get measurements of the old DB
    res = client.query('SHOW MEASUREMENTS')
    measurements = [measurement['name'] for measurement in res.get_points()]
    nb_measurements = len(measurements)

    # Get old DB name
    old_dbname = "{}__old".format(args.dbname)
    # Create old DB if needed
    if old_dbname not in db_list:
        client.create_database(old_dbname)

    # Copy data to the old DB
    print("Cloning from {} to {}".format(args.dbname, old_dbname))
    for index, measurement in enumerate(measurements):
        client.query('''SELECT * INTO {}..:MEASUREMENT FROM '''
                     '"{}" GROUP BY *'.format(old_dbname, measurement))
        # Print progress
        print_progress(index + 1, nb_measurements)

    # Delete the database
    client.drop_database(args.dbname)
    # Create new DB if needed
    client.create_database(args.dbname)
    client.switch_database(old_dbname)

    # Get client for new DB
    new_client = InfluxDBClient(args.host, args.port, args.username,
                                args.password, args.dbname)

    # Counter of points without time
    point_wt_time = 0

    print("Migrating from {} to {}".format(old_dbname, args.dbname))
    # Walk into measurement
    for index, measurement in enumerate(measurements):
        # Get tag list
        res = client.query('''SHOW TAG KEYS FROM "{}"'''.format(measurement))
        tags = [v['tagKey'] for v in res.get_points()]
        # Get field list
        res = client.query('''SHOW FIELD KEYS FROM "{}"'''.format(measurement))
        fields = [v['fieldKey'] for v in res.get_points()]

        # Page through the measurement, converting and re-writing points.
        offset = 0
        while True:
            nb_points = 0
            # Prepare new points
            new_points = []
            # Get points
            res = client.query('SELECT * FROM "{}" LIMIT {} OFFSET '
                               '{}'.format(measurement, step, offset))
            for point in res.get_points():
                new_point = {"tags": {},
                             "fields": {},
                             "time": None}
                if args.override_measurement:
                    new_point["measurement"] = args.override_measurement
                else:
                    new_point["measurement"] = measurement
                # Check time
                if point["time"] is None:
                    # Point without time
                    point_wt_time += 1
                    print("Can not convert point without time")
                    continue
                # Convert all fields to floats; non-numeric values become
                # strings under "state" (for "value") or "<field>_str".
                for field in fields:
                    try:
                        new_point["fields"][field] = float(point[field])
                    except (ValueError, TypeError):
                        if field == "value":
                            new_key = "state"
                        else:
                            new_key = "{}_str".format(field)
                        new_point["fields"][new_key] = str(point[field])
                # Add tags
                for tag in tags:
                    new_point["tags"][tag] = point[tag]
                # Set time
                new_point["time"] = point["time"]
                # Add new point to the new list
                new_points.append(new_point)
                # Count nb points
                nb_points += 1

            # Send to the new db.  (The original wrapped this in a no-op
            # `except Exception as exp: raise exp`, removed here.)
            new_client.write_points(new_points)

            # An empty page means the measurement is fully migrated.
            if nb_points == 0:
                break
            # Increment offset
            offset += step

        # Print progress
        print_progress(index + 1, nb_measurements)

    # Delete database if needed
    if args.delete:
        print("Dropping {}".format(old_dbname))
        client.drop_database(old_dbname)

    # Honour the declared -> int return type; the original fell off the
    # end and returned None while the sibling scripts return 0.
    return 0

View File

@@ -1426,7 +1426,6 @@ spotcrime==1.0.3
spotipy-homeassistant==2.4.4.dev1
# homeassistant.components.recorder
# homeassistant.scripts.db_migrator
# homeassistant.components.sensor.sql
sqlalchemy==1.2.13

View File

@@ -232,7 +232,6 @@ smhi-pkg==1.0.5
somecomfort==0.5.2
# homeassistant.components.recorder
# homeassistant.scripts.db_migrator
# homeassistant.components.sensor.sql
sqlalchemy==1.2.13