Mirror of https://github.com/ARM-software/workload-automation.git (synced 2025-10-31 07:04:17 +00:00)
	output_processors/postgresql: Add support for new directory Artifacts
Reflecting the recently added ability to store directories as Artifacts, enable uploading a directory as a compressed tar file rather than storing the file directly.
@@ -16,6 +16,7 @@
 import os
 import uuid
 import collections
+import tarfile
 
 try:
     import psycopg2
@@ -92,7 +93,7 @@ class PostgresqlResultProcessor(OutputProcessor):
         "create_target": "INSERT INTO Targets (oid, run_oid, target, cpus, os, os_version, hostid, hostname, abi, is_rooted, kernel_version, kernel_release, kernel_sha1, kernel_config, sched_features, page_size_kb, system_id, screen_resolution, prop, android_id, _pod_version, _pod_serialization_version) "
                          "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
         "create_event": "INSERT INTO Events (oid, run_oid, job_oid, timestamp, message, _pod_version, _pod_serialization_version) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s",
-        "create_artifact": "INSERT INTO Artifacts (oid, run_oid, job_oid, name, large_object_uuid, description, kind, _pod_version, _pod_serialization_version) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)",
+        "create_artifact": "INSERT INTO Artifacts (oid, run_oid, job_oid, name, large_object_uuid, description, kind, is_dir, _pod_version, _pod_serialization_version) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
         "create_metric": "INSERT INTO Metrics (oid, run_oid, job_oid, name, value, units, lower_is_better, _pod_version, _pod_serialization_version) VALUES (%s, %s, %s, %s, %s, %s , %s, %s, %s)",
         "create_augmentation": "INSERT INTO Augmentations (oid, run_oid, name) VALUES (%s, %s, %s)",
         "create_classifier": "INSERT INTO Classifiers (oid, artifact_oid, metric_oid, job_oid, run_oid, key, value) VALUES (%s, %s, %s, %s, %s, %s, %s)",
@@ -534,7 +535,7 @@ class PostgresqlResultProcessor(OutputProcessor):
                   'with the create command'
             raise OutputProcessorError(msg.format(db_schema_version, local_schema_version))
 
-    def _sql_write_lobject(self, source, lobject):
+    def _sql_write_file_lobject(self, source, lobject):
         with open(source) as lobj_file:
             lobj_data = lobj_file.read()
         if len(lobj_data) > 50000000:  # Notify if LO inserts larger than 50MB
@@ -542,10 +543,18 @@ class PostgresqlResultProcessor(OutputProcessor):
         lobject.write(lobj_data)
         self.conn.commit()
 
+    def _sql_write_dir_lobject(self, source, lobject):
+        with tarfile.open(fileobj=lobject, mode='w|gz') as lobj_dir:
+            lobj_dir.add(source, arcname='.')
+        self.conn.commit()
+
     def _sql_update_artifact(self, artifact, output_object):
         self.logger.debug('Updating artifact: {}'.format(artifact))
         lobj = self.conn.lobject(oid=self.artifacts_already_added[artifact], mode='w')
-        self._sql_write_lobject(os.path.join(output_object.basepath, artifact.path), lobj)
+        if artifact.is_dir:
+            self._sql_write_dir_lobject(os.path.join(output_object.basepath, artifact.path), lobj)
+        else:
+            self._sql_write_file_lobject(os.path.join(output_object.basepath, artifact.path), lobj)
 
     def _sql_create_artifact(self, artifact, output_object, record_in_added=False, job_uuid=None):
         self.logger.debug('Uploading artifact: {}'.format(artifact))
@@ -553,8 +562,10 @@ class PostgresqlResultProcessor(OutputProcessor):
         lobj = self.conn.lobject()
         loid = lobj.oid
         large_object_uuid = uuid.uuid4()
-
-        self._sql_write_lobject(os.path.join(output_object.basepath, artifact.path), lobj)
+        if artifact.is_dir:
+            self._sql_write_dir_lobject(os.path.join(output_object.basepath, artifact.path), lobj)
+        else:
+            self._sql_write_file_lobject(os.path.join(output_object.basepath, artifact.path), lobj)
 
         self.cursor.execute(
             self.sql_command['create_large_object'],
@@ -573,6 +584,7 @@ class PostgresqlResultProcessor(OutputProcessor):
                 large_object_uuid,
                 artifact.description,
                 str(artifact.kind),
+                artifact.is_dir,
                 artifact._pod_version,  # pylint: disable=protected-access
                 artifact._pod_serialization_version,  # pylint: disable=protected-access
             )
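The change relies on a psycopg2 large object being file-like: tarfile can stream a gzip-compressed archive of a directory straight into it, and read it back out the same way. Below is a minimal standalone sketch of that round trip; the helper names, connection settings and paths are illustrative assumptions and are not part of this commit.

# Sketch of the directory-as-large-object technique used by the diff above.
# Function names, DSN and paths are assumptions for illustration only.
import tarfile

import psycopg2


def store_directory(conn, source_dir):
    """Stream source_dir into a new large object as a gzipped tar; return its oid."""
    lobj = conn.lobject(mode='wb')            # new, empty large object
    with tarfile.open(fileobj=lobj, mode='w|gz') as tar:
        tar.add(source_dir, arcname='.')      # same call the diff uses
    oid = lobj.oid
    lobj.close()
    conn.commit()
    return oid


def restore_directory(conn, oid, dest_dir):
    """Read the large object back and unpack it into dest_dir."""
    lobj = conn.lobject(oid=oid, mode='rb')
    with tarfile.open(fileobj=lobj, mode='r|gz') as tar:
        tar.extractall(path=dest_dir)
    lobj.close()
    conn.commit()


if __name__ == '__main__':
    conn = psycopg2.connect(dbname='wa_results')          # assumed database name
    oid = store_directory(conn, './wa_output/job_1')      # assumed output directory
    restore_directory(conn, oid, './restored_artifact')
    conn.close()

The stream modes ('w|gz' / 'r|gz') matter here: they only require write()/read() on the file object, so the large object never needs to support seeking, and the directory is compressed on the fly instead of being staged on disk first.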