Coverage for mindsdb / migrations / versions / 2024-11-28_a8a3fac369e7_llm_log_json_in_out.py: 16%

57 statements  

« prev     ^ index     » next       coverage.py v7.13.1, created at 2026-01-21 00:36 +0000

1"""llm_log_json_in_out 

2 

3Revision ID: a8a3fac369e7 

4Revises: 0f89b523f346 

5Create Date: 2024-11-28 17:19:20.798803 

6 

7""" 

8import json 

9 

10from alembic import op 

11import sqlalchemy as sa 

12from sqlalchemy.sql import table 

13import mindsdb.interfaces.storage.db # noqa 

14 

# revision identifiers, used by Alembic.
revision = 'a8a3fac369e7'        # this migration's unique id
down_revision = '0f89b523f346'   # migration applied immediately before this one
branch_labels = None             # not part of a named branch
depends_on = None                # no cross-branch dependencies

20 

21 

def upgrade():
    """Convert ``llm_log.input`` / ``llm_log.output`` from text to JSON.

    Adds nullable JSON columns, backfills them from the existing string
    columns row by row, then drops the string columns and renames the JSON
    columns into their place. ``batch_alter_table`` is used so the schema
    changes also work on SQLite.
    """
    # Lightweight Core table construct so we can SELECT/UPDATE without the ORM.
    llm_log_table = table(
        'llm_log',
        sa.Column('id', sa.Integer),
        sa.Column('input', sa.String),
        sa.Column('output', sa.String),
        sa.Column('input_json', sa.JSON),
        sa.Column('output_json', sa.JSON)
    )

    with op.batch_alter_table('llm_log', schema=None) as batch_op:
        batch_op.add_column(sa.Column('input_json', sa.JSON(), nullable=True))
        batch_op.add_column(sa.Column('output_json', sa.JSON(), nullable=True))

    connection = op.get_bind()
    for row in connection.execute(llm_log_table.select()):
        # Parse the stored input as JSON. If it is NOT valid JSON, keep the
        # raw text wrapped in a one-element list instead of discarding it:
        # the old column is dropped below, so writing None here would lose
        # the data irrecoverably.
        input_json = None
        if row.input is not None:
            try:
                input_json = json.loads(row.input)
            except (TypeError, ValueError):
                input_json = [row.input]

        # Output was free text; store it as a one-element JSON list
        # (downgrade() joins list elements back with newlines).
        output_json = None
        if row.output is not None:
            output_json = [str(row.output)]

        connection.execute(
            llm_log_table.update().where(
                llm_log_table.c.id == row.id
            ).values(input_json=input_json, output_json=output_json)
        )

    # Swap the new JSON columns into the original column names.
    with op.batch_alter_table('llm_log', schema=None) as batch_op:
        batch_op.drop_column('input')
        batch_op.alter_column('input_json', new_column_name='input')
        batch_op.drop_column('output')
        batch_op.alter_column('output_json', new_column_name='output')

61 

62 

def downgrade():
    """Revert ``llm_log.input`` / ``llm_log.output`` from JSON back to text.

    Mirror of :func:`upgrade`: adds string columns, serializes the JSON
    values into them, then drops the JSON columns and renames the string
    columns into place.
    """
    llm_log_table = table(
        'llm_log',
        sa.Column('id', sa.Integer),
        sa.Column('input', sa.JSON),
        sa.Column('output', sa.JSON),
        sa.Column('input_str', sa.String),
        sa.Column('output_str', sa.String)
    )

    with op.batch_alter_table('llm_log', schema=None) as batch_op:
        batch_op.add_column(sa.Column('input_str', sa.String(), nullable=True))
        batch_op.add_column(sa.Column('output_str', sa.String(), nullable=True))

    connection = op.get_bind()
    for row in connection.execute(llm_log_table.select()):
        # Serialize the JSON input back to its text representation.
        input_str = None
        if row.input is not None:
            try:
                input_str = json.dumps(row.input)
            except (TypeError, ValueError):
                pass  # best-effort: unserializable values become NULL

        # upgrade() stored output as a list of strings; join the entries with
        # newlines. str() each element so a single non-string item cannot
        # abort the join and silently NULL the whole value.
        output_str = None
        if isinstance(row.output, list):
            output_str = '\n'.join(str(item) for item in row.output)

        connection.execute(
            llm_log_table.update().where(
                llm_log_table.c.id == row.id
            ).values(input_str=input_str, output_str=output_str)
        )

    # Swap the string columns back into the original column names.
    with op.batch_alter_table('llm_log', schema=None) as batch_op:
        batch_op.drop_column('input')
        batch_op.alter_column('input_str', new_column_name='input')
        batch_op.drop_column('output')
        batch_op.alter_column('output_str', new_column_name='output')