From e8cfa2349596a834905a78f435436241dda5e2a2 Mon Sep 17 00:00:00 2001 From: Dannon Baker Date: Thu, 3 Nov 2022 17:06:41 -0400 Subject: [PATCH 001/334] First steps separating webpack builds, extracting toolshed. These are never deployed on the same host and the caching of shared objects is not useful, and this allows us to trim lots from the toolshed builds. --- client/webpack.config.js | 99 +++++++++++++++++++++++++--------------- 1 file changed, 62 insertions(+), 37 deletions(-) diff --git a/client/webpack.config.js b/client/webpack.config.js index 3961aeefcb59..aa6fe5218acf 100644 --- a/client/webpack.config.js +++ b/client/webpack.config.js @@ -27,17 +27,11 @@ const modulesExcludedFromLibs = [ const buildDate = new Date(); -module.exports = (env = {}, argv = {}) => { - // environment name based on -d, -p, webpack flag +const baseConfig = (env = {}, argv = {}) => { const targetEnv = process.env.NODE_ENV == "production" || argv.mode == "production" ? "production" : "development"; - const buildconfig = { + const buildConfig = { mode: targetEnv, - entry: { - analysis: ["polyfills", "bundleEntries", "entry/analysis"], - generic: ["polyfills", "bundleEntries", "entry/generic"], - toolshed: ["polyfills", "bundleToolshed", "entry/generic"], - }, output: { path: path.join(__dirname, "../", "/static/dist"), filename: "[name].bundled.js", @@ -225,40 +219,71 @@ module.exports = (env = {}, argv = {}) => { }), }), ], - devServer: { - client: { - overlay: { - errors: true, - warnings: false, - }, - webSocketURL: { - port: process.env.GITPOD_WORKSPACE_ID ? 443 : undefined, - }, + }; + + if (process.env.GXY_BUILD_SOURCEMAPS || buildConfig.mode == "development") { + buildConfig.devtool = "eval-cheap-source-map"; + } + + return buildConfig; +}; + +const analysisConfig = (env = {}, argv = {}) => { + // environment name based on -d, -p, webpack flag + const buildConfig = baseConfig(env, argv); + + buildConfig.entry = { + analysis: ["polyfills", "bundleEntries", "entry/analysis"], + generic: ["polyfills", "bundleEntries", "entry/generic"], + }; + + buildConfig.devServer = { + client: { + overlay: { + errors: true, + warnings: false, }, - allowedHosts: process.env.GITPOD_WORKSPACE_ID ? "all" : "auto", - devMiddleware: { - publicPath: "/static/dist", + webSocketURL: { + port: process.env.GITPOD_WORKSPACE_ID ? 443 : undefined, }, - hot: true, - port: 8081, - host: "0.0.0.0", - // proxy *everything* to the galaxy server. - // someday, when we have a fully API-driven independent client, this - // can be a more limited set -- e.g. `/api`, `/auth` - proxy: { - "**": { - target: process.env.GALAXY_URL || "http://localhost:8080", - secure: process.env.CHANGE_ORIGIN ? !process.env.CHANGE_ORIGIN : true, - changeOrigin: !!process.env.CHANGE_ORIGIN, - logLevel: "debug", - }, + }, + allowedHosts: process.env.GITPOD_WORKSPACE_ID ? "all" : "auto", + devMiddleware: { + publicPath: "/static/dist", + }, + hot: true, + port: 8081, + host: "0.0.0.0", + // proxy *everything* to the galaxy server. + // someday, when we have a fully API-driven independent client, this + // can be a more limited set -- e.g. `/api`, `/auth` + proxy: { + "**": { + target: process.env.GALAXY_URL || "http://localhost:8080", + secure: process.env.CHANGE_ORIGIN ? 
!process.env.CHANGE_ORIGIN : true, + changeOrigin: !!process.env.CHANGE_ORIGIN, + logLevel: "debug", }, }, }; - if (process.env.GXY_BUILD_SOURCEMAPS || buildconfig.mode == "development") { - buildconfig.devtool = "eval-cheap-source-map"; - } + return buildConfig; +}; - return buildconfig; +const toolshedConfig = (env = {}, argv = {}) => { + // environment name based on -d, -p, webpack flag + + const buildConfig = baseConfig(env, argv); + + buildConfig.entry = { + toolshed: ["polyfills", "bundleToolshed", "entry/generic"], + }; + buildConfig.optimization = { + minimize: true, + minimizer: [`...`, new CssMinimizerPlugin()], + }; + + return buildConfig; }; + +module.exports = [analysisConfig, toolshedConfig]; From fa9c3e4c95fc615bc94e06afbe4ee6dcbf09c20d Mon Sep 17 00:00:00 2001 From: Dannon Baker Date: Thu, 3 Nov 2022 17:23:24 -0400 Subject: [PATCH 002/334] Allow parallel builds, enh specificity for optimizatino rules since we'll do toolshed differently. --- client/webpack.config.js | 42 +++++++++++++++++++++------------------- 1 file changed, 22 insertions(+), 20 deletions(-) diff --git a/client/webpack.config.js b/client/webpack.config.js index aa6fe5218acf..cd3f7b811622 100644 --- a/client/webpack.config.js +++ b/client/webpack.config.js @@ -58,26 +58,6 @@ const baseConfig = (env = {}, argv = {}) => { config$: path.join(scriptsBase, "config", targetEnv) + ".js", }, }, - optimization: { - splitChunks: { - cacheGroups: { - styles: { - name: "base", - chunks: "all", - test: (m) => m.constructor.name == "CssModule", - priority: -5, - }, - libs: { - name: "libs", - test: new RegExp(`node_modules[\\/](?!(${modulesExcludedFromLibs})[\\/])|galaxy/scripts/libs`), - chunks: "all", - priority: -10, - }, - }, - }, - minimize: true, - minimizer: [`...`, new CssMinimizerPlugin()], - }, module: { rules: [ { @@ -237,6 +217,27 @@ const analysisConfig = (env = {}, argv = {}) => { generic: ["polyfills", "bundleEntries", "entry/generic"], }; + buildConfig.optimization = { + splitChunks: { + cacheGroups: { + styles: { + name: "base", + chunks: "all", + test: (m) => m.constructor.name == "CssModule", + priority: -5, + }, + libs: { + name: "libs", + test: new RegExp(`node_modules[\\/](?!(${modulesExcludedFromLibs})[\\/])|galaxy/scripts/libs`), + chunks: "all", + priority: -10, + }, + }, + }, + minimize: true, + minimizer: [`...`, new CssMinimizerPlugin()], + }; + buildConfig.devServer = { client: { overlay: { @@ -287,3 +288,4 @@ const toolshedConfig = (env = {}, argv = {}) => { }; module.exports = [analysisConfig, toolshedConfig]; +module.exports.parallelism = 2; From 345ec1b4244d94066d9aa3ff7beb4a9f9b3d109a Mon Sep 17 00:00:00 2001 From: Dannon Baker Date: Thu, 3 Nov 2022 17:58:46 -0400 Subject: [PATCH 003/334] optimization tweaks for toolshed entry --- client/webpack.config.js | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/client/webpack.config.js b/client/webpack.config.js index cd3f7b811622..d30f0a68860c 100644 --- a/client/webpack.config.js +++ b/client/webpack.config.js @@ -280,8 +280,10 @@ const toolshedConfig = (env = {}, argv = {}) => { toolshed: ["polyfills", "bundleToolshed", "entry/generic"], }; buildConfig.optimization = { - minimize: true, - minimizer: [`...`, new CssMinimizerPlugin()], + splitChunks: { + // include all types of chunks + chunks: "all", + }, }; return buildConfig; From 998cd562c161160f1ac969adfffe56da4fda6815 Mon Sep 17 00:00:00 2001 From: Pablo Moreno Date: Mon, 16 Jan 2023 15:05:43 +0000 Subject: [PATCH 004/334] Update cellxgene interactive tool to 1.1.1 
Call is the same on 1.1.1, at least as it is used here. Will try to add some options here as well (to specify gene symbols field and others). --- tools/interactive/interactivetool_cellxgene.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/interactive/interactivetool_cellxgene.xml b/tools/interactive/interactivetool_cellxgene.xml index e9069afcb0af..06f39d7110d1 100644 --- a/tools/interactive/interactivetool_cellxgene.xml +++ b/tools/interactive/interactivetool_cellxgene.xml @@ -1,6 +1,6 @@ - + - quay.io/biocontainers/cellxgene:0.16.2--py_0 + quay.io/biocontainers/cellxgene:1.1.1--pyhdfd78af_0 From b860465f9c248171a87f742c04332dd6a3754534 Mon Sep 17 00:00:00 2001 From: Pablo Moreno Date: Mon, 16 Jan 2023 17:28:43 +0000 Subject: [PATCH 005/334] Handling of files not indexed by gene symbols and output recovery --- .../interactive/interactivetool_cellxgene.xml | 51 +++++++++++++++++-- 1 file changed, 46 insertions(+), 5 deletions(-) diff --git a/tools/interactive/interactivetool_cellxgene.xml b/tools/interactive/interactivetool_cellxgene.xml index 06f39d7110d1..584423b59251 100644 --- a/tools/interactive/interactivetool_cellxgene.xml +++ b/tools/interactive/interactivetool_cellxgene.xml @@ -1,6 +1,9 @@ + - quay.io/biocontainers/cellxgene:1.1.1--pyhdfd78af_0 + + quay.io/biocontainers/cellxgene:1.1.1--pyhdfd78af_0 @@ -10,16 +13,54 @@ + + + - + + - + + + + From acd862c6329438d0281016b4e9907fce6f390d36 Mon Sep 17 00:00:00 2001 From: Pablo Moreno Date: Mon, 16 Jan 2023 17:29:22 +0000 Subject: [PATCH 006/334] Clean up --- tools/interactive/interactivetool_cellxgene.xml | 3 --- 1 file changed, 3 deletions(-) diff --git a/tools/interactive/interactivetool_cellxgene.xml b/tools/interactive/interactivetool_cellxgene.xml index 584423b59251..e1983198ba33 100644 --- a/tools/interactive/interactivetool_cellxgene.xml +++ b/tools/interactive/interactivetool_cellxgene.xml @@ -1,8 +1,5 @@ - - quay.io/biocontainers/cellxgene:1.1.1--pyhdfd78af_0 From acc34f6d2110d09858f5ff843f4c8d623a0617d9 Mon Sep 17 00:00:00 2001 From: Pablo Moreno Date: Tue, 7 Mar 2023 08:59:43 +0000 Subject: [PATCH 007/334] Adds help --- .../interactive/interactivetool_cellxgene.xml | 25 ++++++++++++++++--- 1 file changed, 22 insertions(+), 3 deletions(-) diff --git a/tools/interactive/interactivetool_cellxgene.xml b/tools/interactive/interactivetool_cellxgene.xml index e1983198ba33..590ea03374d9 100644 --- a/tools/interactive/interactivetool_cellxgene.xml +++ b/tools/interactive/interactivetool_cellxgene.xml @@ -61,7 +61,26 @@ adata.write_h5ad(output, compression="gzip") - - Interactive tool for visualising AnnData. 
- + From e9e8f9be27b02815fc3ddbbe6211aa58593c1488 Mon Sep 17 00:00:00 2001 From: Pablo Moreno Date: Tue, 7 Mar 2023 12:03:34 +0000 Subject: [PATCH 008/334] Change name and tutorial link --- ...ool_cellxgene.xml => interactivetool_cellxgene_1.1.1.xml} | 5 +++++ 1 file changed, 5 insertions(+) rename tools/interactive/{interactivetool_cellxgene.xml => interactivetool_cellxgene_1.1.1.xml} (95%) diff --git a/tools/interactive/interactivetool_cellxgene.xml b/tools/interactive/interactivetool_cellxgene_1.1.1.xml similarity index 95% rename from tools/interactive/interactivetool_cellxgene.xml rename to tools/interactive/interactivetool_cellxgene_1.1.1.xml index 590ea03374d9..2fec1b7865ac 100644 --- a/tools/interactive/interactivetool_cellxgene.xml +++ b/tools/interactive/interactivetool_cellxgene_1.1.1.xml @@ -82,5 +82,10 @@ Outputs If you create gene sets or differential experession sets, this will be available as a collection of files under "Cellxgene user annotations and gene sets" at the end of the execution (when you stop the interactive environment). + +Tutorials +--------- + +You can find cellxgene tuorials `here `_ . ]]> From 999322b35438b1dd5d6d6a70e3d9b6f43b2cd1d4 Mon Sep 17 00:00:00 2001 From: Pablo Moreno Date: Tue, 7 Mar 2023 12:05:10 +0000 Subject: [PATCH 009/334] Adds previous version as separate file --- .../interactivetool_cellxgene_0.16.2.xml | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) create mode 100644 tools/interactive/interactivetool_cellxgene_0.16.2.xml diff --git a/tools/interactive/interactivetool_cellxgene_0.16.2.xml b/tools/interactive/interactivetool_cellxgene_0.16.2.xml new file mode 100644 index 000000000000..e9069afcb0af --- /dev/null +++ b/tools/interactive/interactivetool_cellxgene_0.16.2.xml @@ -0,0 +1,29 @@ + + + quay.io/biocontainers/cellxgene:0.16.2--py_0 + + + + 80 + + + + + + + + + + + + + + Interactive tool for visualising AnnData. + + From 6fe3db1db7abd4a2a1b921755f9fa7b1434405b1 Mon Sep 17 00:00:00 2001 From: Pablo Moreno Date: Tue, 7 Mar 2023 12:06:55 +0000 Subject: [PATCH 010/334] Update tool_conf.xml.sample --- lib/galaxy/config/sample/tool_conf.xml.sample | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/galaxy/config/sample/tool_conf.xml.sample b/lib/galaxy/config/sample/tool_conf.xml.sample index b713c208d1a6..401d10a838a6 100644 --- a/lib/galaxy/config/sample/tool_conf.xml.sample +++ b/lib/galaxy/config/sample/tool_conf.xml.sample @@ -133,7 +133,8 @@ - + + From 703751b65c7b3a8c2656dea896a5ea3b4f69b8a7 Mon Sep 17 00:00:00 2001 From: Pablo Moreno Date: Wed, 8 Mar 2023 13:17:11 +0000 Subject: [PATCH 011/334] Avoid errors automatically detected by galaxy due to stderr output --- tools/interactive/interactivetool_cellxgene_1.1.1.xml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tools/interactive/interactivetool_cellxgene_1.1.1.xml b/tools/interactive/interactivetool_cellxgene_1.1.1.xml index 2fec1b7865ac..08edd9273b4d 100644 --- a/tools/interactive/interactivetool_cellxgene_1.1.1.xml +++ b/tools/interactive/interactivetool_cellxgene_1.1.1.xml @@ -7,6 +7,13 @@ 80 + + + Date: Mon, 27 Mar 2023 15:06:15 +0100 Subject: [PATCH 012/334] Layer and optionally make var field unique. 
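The preprocessing this enables boils down to suffixing repeated values of the
chosen var column with a cumulative count before using it as the index. A
minimal pandas sketch of that idea (column name, suffix and data are
illustrative, not the patch's exact helper):

    import pandas as pd

    # Illustrative var table with a duplicated gene symbol
    var = pd.DataFrame({"gene_symbol": ["TP53", "ACTB", "TP53", "TP53"]})

    # Prior occurrences of each symbol: TP53 -> 0, ACTB -> 0, TP53 -> 1, TP53 -> 2
    counts = var.groupby("gene_symbol").cumcount()

    # Keep first occurrences as-is, suffix the repeats, then index by the result
    var["gene_symbol_u"] = var["gene_symbol"].where(counts == 0, var["gene_symbol"] + "_d" + counts.astype(str))
    var = var.set_index("gene_symbol_u")
    assert var.index.is_unique

Making the field unique this way requires rewriting the AnnData object, which is
why it is only done when explicitly requested.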
--- .../interactivetool_cellxgene_1.1.1.xml | 60 +++++++++++++------ 1 file changed, 42 insertions(+), 18 deletions(-) diff --git a/tools/interactive/interactivetool_cellxgene_1.1.1.xml b/tools/interactive/interactivetool_cellxgene_1.1.1.xml index 08edd9273b4d..14bbcfc97bd3 100644 --- a/tools/interactive/interactivetool_cellxgene_1.1.1.xml +++ b/tools/interactive/interactivetool_cellxgene_1.1.1.xml @@ -15,19 +15,22 @@ /> @@ -44,21 +47,31 @@ def rn(df, field, suffix = '-duplicate-'): adata = ad.read_h5ad(sys.argv[1]) output = sys.argv[2] gene_symbol_field = sys.argv[3] -if gene_symbol_field not in adata.var.keys(): - sys.exit(f"Field {gene_symbol_field} set as var_name does not exist in the var object. AnnData object will be used as it was given") +make_unique = (sys.argv[4].lower() == "true") +layer = sys.argv[5] -adata.var = rn(adata.var, gene_symbol_field, suffix = "_d") +if gene_symbol_field and make_unique: + if gene_symbol_field not in adata.var.keys(): + sys.exit(f"Field {gene_symbol_field} set as var_name does not exist in the var object. AnnData object will be used as it was given") + + adata.var = rn(adata.var, gene_symbol_field, suffix = "_d") + adata.var["extra_gene_id"] = adata.var.index + adata.var = adata.var.set_index(f"{gene_symbol_field}_u") -adata.var["extra_gene_id"] = adata.var.index -adata.var = adata.var.set_index(f"{gene_symbol_field}_u") +if layer: + if layer not in adata.layers.keys(): + sys.exit(f"Layer {layer} is not present in AnnData, only available layers are: {', '.join(adata.layers.keys())}") + else: + adata.X = adata.layers[layer] - -adata.write_h5ad(output, compression="gzip") +adata.write_h5ad(output) ]]> - + + + @@ -80,9 +93,20 @@ Selecting the Var name ---------------------- It can happen that the main index for the var element of AnnData is not the gene symbol field, in which case search by genes will probably be by identifier. -You can choose a different field (which contains the gene symbols) so that +You can choose a different field and celxgene will use this. If in addition you choose "Make unique", the AnnData is modified (in a new copy) so that that field is made unique and -it is indexed by it. Then cellxgene will allow searches by genes in that field. +it is indexed by it. Making it unique entails though loading the object into memory, +modifying and writing it back, which can delay the execution of cellxgene. + +Then cellxgene will allow searches by genes in that field. + +Selecting the layer +------------------- + +It can happen that the AnnData object contains multiple layers, for example +one with the raw counts and another with the normalised counts. You can select +which layer to use as the matrix for cellxgene. By default cellxgene will use the X slot, but +that slot might not contain the matrix that you want to visualise. 
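As a sketch, the layer promotion performed by the preprocessing step amounts to
the following (the layer name and file names here are illustrative, and assume
an input file that actually carries such a layer)::

    import sys

    import anndata as ad

    adata = ad.read_h5ad("input.h5ad")
    layer = "normalised"  # hypothetical layer name
    if layer not in adata.layers.keys():
        sys.exit(f"Layer {layer} is not present in AnnData, only available layers are: {', '.join(adata.layers.keys())}")
    # Promote the chosen layer to the main matrix that cellxgene displays
    adata.X = adata.layers[layer]
    adata.write_h5ad("ready_for_cellxgene.h5ad")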
Outputs ------- From 1741fa45ee1840603e06ede10367614e0f6f9a13 Mon Sep 17 00:00:00 2001 From: Pablo Moreno Date: Tue, 28 Mar 2023 10:19:54 +0100 Subject: [PATCH 013/334] Some tabs to spaces --- tools/interactive/interactivetool_cellxgene_1.1.1.xml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tools/interactive/interactivetool_cellxgene_1.1.1.xml b/tools/interactive/interactivetool_cellxgene_1.1.1.xml index 14bbcfc97bd3..c8c6c4cb72bc 100644 --- a/tools/interactive/interactivetool_cellxgene_1.1.1.xml +++ b/tools/interactive/interactivetool_cellxgene_1.1.1.xml @@ -17,21 +17,21 @@ From 12a94e2ec4de372f749e2f7591de8c3ec52aeb3f Mon Sep 17 00:00:00 2001 From: Matthias Bernt Date: Sun, 29 Jan 2023 13:52:51 +0100 Subject: [PATCH 014/334] add parameter name to validation errors --- lib/galaxy/tools/parameters/basic.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/galaxy/tools/parameters/basic.py b/lib/galaxy/tools/parameters/basic.py index be1ed9002c0d..278f0e6232fe 100644 --- a/lib/galaxy/tools/parameters/basic.py +++ b/lib/galaxy/tools/parameters/basic.py @@ -290,7 +290,10 @@ def validate(self, value, trans=None): if value in ["", None] and self.optional: return for validator in self.validators: - validator.validate(value, trans) + try: + validator.validate(value, trans) + except ValueError as e: + raise ValueError(f"{self.name}: {e}") def to_dict(self, trans, other_values=None): """to_dict tool parameter. This can be overridden by subclasses.""" From 26bcfd70b7a30d07bee259c6018adafeb022bfe8 Mon Sep 17 00:00:00 2001 From: Matthias Bernt Date: Sun, 30 Jul 2023 18:33:18 +0200 Subject: [PATCH 015/334] reraise properly and add "Parameter" before parameter name --- lib/galaxy/tools/parameters/basic.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/tools/parameters/basic.py b/lib/galaxy/tools/parameters/basic.py index 278f0e6232fe..4f993d5fde10 100644 --- a/lib/galaxy/tools/parameters/basic.py +++ b/lib/galaxy/tools/parameters/basic.py @@ -293,7 +293,7 @@ def validate(self, value, trans=None): try: validator.validate(value, trans) except ValueError as e: - raise ValueError(f"{self.name}: {e}") + raise ValueError(f"Parameter {self.name}: {e}") from None def to_dict(self, trans, other_values=None): """to_dict tool parameter. 
This can be overridden by subclasses.""" @@ -1993,7 +1993,11 @@ def do_validate(v): ): return else: - validator.validate(v, trans) + try: + validator.validate(v, trans) + except ValueError as e: + raise ValueError(f"Parameter {self.name}: {e}") from None + dataset_count = 0 if value: From 5c050ce12cfe3bcdf9347624210e4d4d3022c217 Mon Sep 17 00:00:00 2001 From: Matthias Bernt Date: Sun, 30 Jul 2023 18:34:44 +0200 Subject: [PATCH 016/334] move doc tests to unit tests and fix them --- lib/galaxy/tools/parameters/validation.py | 369 +---------------- .../app/tools/test_parameter_validation.py | 377 ++++++++++++++++++ 2 files changed, 379 insertions(+), 367 deletions(-) diff --git a/lib/galaxy/tools/parameters/validation.py b/lib/galaxy/tools/parameters/validation.py index f9af7b2fe619..4240bb2fb3c4 100644 --- a/lib/galaxy/tools/parameters/validation.py +++ b/lib/galaxy/tools/parameters/validation.py @@ -15,12 +15,6 @@ log = logging.getLogger(__name__) -def get_test_fname(fname): - """Returns test data filename""" - path, name = os.path.split(__file__) - full_path = os.path.join(path, "test", fname) - return full_path - class Validator(abc.ABC): """ @@ -87,45 +81,6 @@ def validate(self, value, trans=None, message=None, value_to_show=None): class RegexValidator(Validator): """ Validator that evaluates a regular expression - - >>> from galaxy.util import XML - >>> from galaxy.tools.parameters.basic import ToolParameter - >>> p = ToolParameter.build(None, XML(''' - ... - ... [Ff]oo - ... - ... ''')) - >>> t = p.validate("Foo") - >>> t = p.validate("foo") - >>> t = p.validate("Fop") - Traceback (most recent call last): - ... - ValueError: Value 'Fop' does not match regular expression '[Ff]oo' - >>> t = p.validate(["Foo", "foo"]) - >>> t = p.validate(["Foo", "Fop"]) - Traceback (most recent call last): - ... - ValueError: Value 'Fop' does not match regular expression '[Ff]oo' - >>> - >>> p = ToolParameter.build(None, XML(''' - ... - ... [Ff]oo - ... - ... ''')) - >>> t = p.validate("Foo") - Traceback (most recent call last): - ... - ValueError: Value 'Foo' does match regular expression '[Ff]oo' - >>> t = p.validate("foo") - Traceback (most recent call last): - ... - ValueError: Value 'foo' does match regular expression '[Ff]oo' - >>> t = p.validate("Fop") - >>> t = p.validate(["Fop", "foo"]) - Traceback (most recent call last): - ... - ValueError: Value 'foo' does match regular expression '[Ff]oo' - >>> t = p.validate(["Fop", "Fop"]) """ @classmethod @@ -176,40 +131,6 @@ def validate(self, value, trans=None): class InRangeValidator(ExpressionValidator): """ Validator that ensures a number is in a specified range - - >>> from galaxy.util import XML - >>> from galaxy.tools.parameters.basic import ToolParameter - >>> p = ToolParameter.build(None, XML(''' - ... - ... - ... - ... ''')) - >>> t = p.validate(10) - Traceback (most recent call last): - ... - ValueError: Doh!! 10 not in range - >>> t = p.validate(15) - >>> t = p.validate(20) - >>> t = p.validate(21) - Traceback (most recent call last): - ... - ValueError: Doh!! 21 not in range - >>> - >>> p = ToolParameter.build(None, XML(''' - ... - ... - ... - ... ''')) - >>> t = p.validate(10) - >>> t = p.validate(15) - Traceback (most recent call last): - ... - ValueError: Value ('15') must not fulfill float('10') < value <= float('20') - >>> t = p.validate(20) - Traceback (most recent call last): - ... 
- ValueError: Value ('20') must not fulfill float('10') < value <= float('20') - >>> t = p.validate(21) """ @classmethod @@ -252,40 +173,6 @@ def __init__(self, message, range_min, range_max, exclude_min=False, exclude_max class LengthValidator(InRangeValidator): """ Validator that ensures the length of the provided string (value) is in a specific range - - >>> from galaxy.util import XML - >>> from galaxy.tools.parameters.basic import ToolParameter - >>> p = ToolParameter.build(None, XML(''' - ... - ... - ... - ... ''')) - >>> t = p.validate("foo") - >>> t = p.validate("bar") - >>> t = p.validate("f") - Traceback (most recent call last): - ... - ValueError: Must have length of at least 2 and at most 8 - >>> t = p.validate("foobarbaz") - Traceback (most recent call last): - ... - ValueError: Must have length of at least 2 and at most 8 - >>> - >>> p = ToolParameter.build(None, XML(''' - ... - ... - ... - ... ''')) - >>> t = p.validate("foo") - Traceback (most recent call last): - ... - ValueError: Must not have length of at least 2 and at most 8 - >>> t = p.validate("bar") - Traceback (most recent call last): - ... - ValueError: Must not have length of at least 2 and at most 8 - >>> t = p.validate("f") - >>> t = p.validate("foobarbaz") """ @classmethod @@ -304,44 +191,6 @@ def validate(self, value, trans=None): class DatasetOkValidator(Validator): """ Validator that checks if a dataset is in an 'ok' state - - >>> from galaxy.datatypes.registry import example_datatype_registry_for_sample - >>> from galaxy.model import History, HistoryDatasetAssociation, set_datatypes_registry - >>> from galaxy.model.mapping import init - >>> from galaxy.util import XML - >>> from galaxy.tools.parameters.basic import ToolParameter - >>> - >>> sa_session = init("/tmp", "sqlite:///:memory:", create_tables=True).session - >>> hist = History() - >>> with sa_session.begin(): - ... sa_session.add(hist) - >>> set_datatypes_registry(example_datatype_registry_for_sample()) - >>> ok_hda = hist.add_dataset(HistoryDatasetAssociation(id=1, extension='interval', create_dataset=True, sa_session=sa_session)) - >>> ok_hda.set_dataset_state(model.Dataset.states.OK) - >>> notok_hda = hist.add_dataset(HistoryDatasetAssociation(id=2, extension='interval', create_dataset=True, sa_session=sa_session)) - >>> notok_hda.set_dataset_state(model.Dataset.states.EMPTY) - >>> - >>> p = ToolParameter.build(None, XML(''' - ... - ... - ... - ... ''')) - >>> t = p.validate(ok_hda) - >>> t = p.validate(notok_hda) - Traceback (most recent call last): - ... - ValueError: The selected dataset is still being generated, select another dataset or wait until it is completed - >>> - >>> p = ToolParameter.build(None, XML(''' - ... - ... - ... - ... ''')) - >>> t = p.validate(ok_hda) - Traceback (most recent call last): - ... - ValueError: The selected dataset must not be in state OK - >>> t = p.validate(notok_hda) """ @classmethod @@ -363,44 +212,6 @@ def validate(self, value, trans=None): class DatasetEmptyValidator(Validator): """ Validator that checks if a dataset has a positive file size. 
- - >>> from galaxy.datatypes.registry import example_datatype_registry_for_sample - >>> from galaxy.model import Dataset, History, HistoryDatasetAssociation, set_datatypes_registry - >>> from galaxy.model.mapping import init - >>> from galaxy.util import XML - >>> from galaxy.tools.parameters.basic import ToolParameter - >>> - >>> sa_session = init("/tmp", "sqlite:///:memory:", create_tables=True).session - >>> hist = History() - >>> with sa_session.begin(): - ... sa_session.add(hist) - >>> set_datatypes_registry(example_datatype_registry_for_sample()) - >>> empty_dataset = Dataset(external_filename=get_test_fname("empty.txt")) - >>> empty_hda = hist.add_dataset(HistoryDatasetAssociation(id=1, extension='interval', dataset=empty_dataset, sa_session=sa_session)) - >>> full_dataset = Dataset(external_filename=get_test_fname("1.tabular")) - >>> full_hda = hist.add_dataset(HistoryDatasetAssociation(id=2, extension='interval', dataset=full_dataset, sa_session=sa_session)) - >>> - >>> p = ToolParameter.build(None, XML(''' - ... - ... - ... - ... ''')) - >>> t = p.validate(full_hda) - >>> t = p.validate(empty_hda) - Traceback (most recent call last): - ... - ValueError: The selected dataset is empty, this tool expects non-empty files. - >>> - >>> p = ToolParameter.build(None, XML(''' - ... - ... - ... - ... ''')) - >>> t = p.validate(full_hda) - Traceback (most recent call last): - ... - ValueError: The selected dataset is non-empty, this tool expects empty files. - >>> t = p.validate(empty_hda) """ @classmethod @@ -412,6 +223,8 @@ def from_element(cls, param, elem): return cls(message, negate) def validate(self, value, trans=None): + print(f"value {value}") + print(f"value {value.get_size()}") if value: super().validate(value.get_size() != 0) @@ -419,46 +232,6 @@ def validate(self, value, trans=None): class DatasetExtraFilesPathEmptyValidator(Validator): """ Validator that checks if a dataset's extra_files_path exists and is not empty. - - >>> from galaxy.datatypes.registry import example_datatype_registry_for_sample - >>> from galaxy.model import History, HistoryDatasetAssociation, set_datatypes_registry - >>> from galaxy.model.mapping import init - >>> from galaxy.util import XML - >>> from galaxy.tools.parameters.basic import ToolParameter - >>> - >>> sa_session = init("/tmp", "sqlite:///:memory:", create_tables=True).session - >>> hist = History() - >>> with sa_session.begin(): - ... sa_session.add(hist) - >>> set_datatypes_registry(example_datatype_registry_for_sample()) - >>> has_extra_hda = hist.add_dataset(HistoryDatasetAssociation(id=1, extension='interval', create_dataset=True, sa_session=sa_session)) - >>> has_extra_hda.dataset.file_size = 10 - >>> has_extra_hda.dataset.total_size = 15 - >>> has_no_extra_hda = hist.add_dataset(HistoryDatasetAssociation(id=2, extension='interval', create_dataset=True, sa_session=sa_session)) - >>> has_no_extra_hda.dataset.file_size = 10 - >>> has_no_extra_hda.dataset.total_size = 10 - >>> - >>> p = ToolParameter.build(None, XML(''' - ... - ... - ... - ... ''')) - >>> t = p.validate(has_extra_hda) - >>> t = p.validate(has_no_extra_hda) - Traceback (most recent call last): - ... - ValueError: The selected dataset's extra_files_path directory is empty or does not exist, this tool expects non-empty extra_files_path directories associated with the selected input. - >>> - >>> p = ToolParameter.build(None, XML(''' - ... - ... - ... - ... ''')) - >>> t = p.validate(has_extra_hda) - Traceback (most recent call last): - ... 
- ValueError: The selected dataset's extra_files_path directory is non-empty or does exist, this tool expects empty extra_files_path directories associated with the selected input. - >>> t = p.validate(has_no_extra_hda) """ @classmethod @@ -477,58 +250,6 @@ def validate(self, value, trans=None): class MetadataValidator(Validator): """ Validator that checks for missing metadata - - >>> from galaxy.datatypes.registry import example_datatype_registry_for_sample - >>> from galaxy.model import Dataset, History, HistoryDatasetAssociation, set_datatypes_registry - >>> from galaxy.model.mapping import init - >>> from galaxy.util import XML - >>> from galaxy.tools.parameters.basic import ToolParameter - >>> - >>> sa_session = init("/tmp", "sqlite:///:memory:", create_tables=True).session - >>> hist = History() - >>> with sa_session.begin(): - ... sa_session.add(hist) - >>> set_datatypes_registry(example_datatype_registry_for_sample()) - >>> fname = get_test_fname('1.bed') - >>> bedds = Dataset(external_filename=fname) - >>> hda = hist.add_dataset(HistoryDatasetAssociation(id=1, extension='bed', create_dataset=True, sa_session=sa_session, dataset=bedds)) - >>> hda.set_dataset_state(model.Dataset.states.OK) - >>> hda.set_meta() - >>> hda.metadata.strandCol = hda.metadata.spec["strandCol"].no_value - >>> param_xml = ''' - ... - ... ''' - >>> p = ToolParameter.build(None, XML(param_xml.format(check="nameCol", skip=""))) - >>> t = p.validate(hda) - >>> p = ToolParameter.build(None, XML(param_xml.format(check="strandCol", skip=""))) - >>> t = p.validate(hda) - Traceback (most recent call last): - ... - ValueError: Metadata 'strandCol' missing, click the pencil icon in the history item to edit / save the metadata attributes - >>> p = ToolParameter.build(None, XML(param_xml.format(check="", skip="dbkey,comment_lines,column_names,strandCol"))) - >>> t = p.validate(hda) - >>> p = ToolParameter.build(None, XML(param_xml.format(check="", skip="dbkey,comment_lines,column_names,nameCol"))) - >>> t = p.validate(hda) - Traceback (most recent call last): - ... - ValueError: Metadata 'strandCol' missing, click the pencil icon in the history item to edit / save the metadata attributes - >>> param_xml_negate = ''' - ... - ... ''' - >>> p = ToolParameter.build(None, XML(param_xml_negate.format(check="strandCol", skip=""))) - >>> t = p.validate(hda) - >>> p = ToolParameter.build(None, XML(param_xml_negate.format(check="nameCol", skip=""))) - >>> t = p.validate(hda) - Traceback (most recent call last): - ... - ValueError: At least one of the checked metadata 'nameCol' is set, click the pencil icon in the history item to edit / save the metadata attributes - >>> p = ToolParameter.build(None, XML(param_xml_negate.format(check="", skip="dbkey,comment_lines,column_names,nameCol"))) - >>> t = p.validate(hda) - >>> p = ToolParameter.build(None, XML(param_xml_negate.format(check="", skip="dbkey,comment_lines,column_names,strandCol"))) - >>> t = p.validate(hda) - Traceback (most recent call last): - ... - ValueError: At least one of the non skipped metadata 'dbkey,comment_lines,column_names,strandCol' is set, click the pencil icon in the history item to edit / save the metadata attributes """ requires_dataset_metadata = True @@ -599,45 +320,6 @@ def validate(self, value, trans=None): class UnspecifiedBuildValidator(Validator): """ Validator that checks for dbkey not equal to '?' 
- - >>> from galaxy.datatypes.registry import example_datatype_registry_for_sample - >>> from galaxy.model import History, HistoryDatasetAssociation, set_datatypes_registry - >>> from galaxy.model.mapping import init - >>> from galaxy.util import XML - >>> from galaxy.tools.parameters.basic import ToolParameter - >>> - >>> sa_session = init("/tmp", "sqlite:///:memory:", create_tables=True).session - >>> hist = History() - >>> with sa_session.begin(): - ... sa_session.add(hist) - >>> set_datatypes_registry(example_datatype_registry_for_sample()) - >>> has_dbkey_hda = hist.add_dataset(HistoryDatasetAssociation(id=1, extension='interval', create_dataset=True, sa_session=sa_session)) - >>> has_dbkey_hda.set_dataset_state(model.Dataset.states.OK) - >>> has_dbkey_hda.metadata.dbkey = 'hg19' - >>> has_no_dbkey_hda = hist.add_dataset(HistoryDatasetAssociation(id=2, extension='interval', create_dataset=True, sa_session=sa_session)) - >>> has_no_dbkey_hda.set_dataset_state(model.Dataset.states.OK) - >>> - >>> p = ToolParameter.build(None, XML(''' - ... - ... - ... - ... ''')) - >>> t = p.validate(has_dbkey_hda) - >>> t = p.validate(has_no_dbkey_hda) - Traceback (most recent call last): - ... - ValueError: Unspecified genome build, click the pencil icon in the history item to set the genome build - >>> - >>> p = ToolParameter.build(None, XML(''' - ... - ... - ... - ... ''')) - >>> t = p.validate(has_dbkey_hda) - Traceback (most recent call last): - ... - ValueError: Specified genome build, click the pencil icon in the history item to remove the genome build - >>> t = p.validate(has_no_dbkey_hda) """ requires_dataset_metadata = True @@ -663,31 +345,6 @@ def validate(self, value, trans=None): class NoOptionsValidator(Validator): """ Validator that checks for empty select list - - >>> from galaxy.util import XML - >>> from galaxy.tools.parameters.basic import ToolParameter - >>> p = ToolParameter.build(None, XML(''' - ... - ... - ... - ... ''')) - >>> t = p.validate('foo') - >>> t = p.validate(None) - Traceback (most recent call last): - ... - ValueError: No options available for selection - >>> - >>> p = ToolParameter.build(None, XML(''' - ... - ... - ... - ... - ... ''')) - >>> t = p.validate('foo') - Traceback (most recent call last): - ... - ValueError: Options available for selection - >>> t = p.validate(None) """ @classmethod @@ -705,28 +362,6 @@ def validate(self, value, trans=None): class EmptyTextfieldValidator(Validator): """ Validator that checks for empty text field - - >>> from galaxy.util import XML - >>> from galaxy.tools.parameters.basic import ToolParameter - >>> p = ToolParameter.build(None, XML(''' - ... - ... - ... - ... ''')) - >>> t = p.validate("") - Traceback (most recent call last): - ... - ValueError: Field requires a value - >>> p = ToolParameter.build(None, XML(''' - ... - ... - ... - ... ''')) - >>> t = p.validate("foo") - Traceback (most recent call last): - ... 
- ValueError: Field must not set a value - >>> t = p.validate("") """ @classmethod diff --git a/test/unit/app/tools/test_parameter_validation.py b/test/unit/app/tools/test_parameter_validation.py index 77fcfee7030d..dcf8d66feefa 100644 --- a/test/unit/app/tools/test_parameter_validation.py +++ b/test/unit/app/tools/test_parameter_validation.py @@ -1,6 +1,15 @@ +from galaxy.datatypes.sniff import get_test_fname +from galaxy.model import Dataset, History, HistoryDatasetAssociation from .util import BaseParameterTestCase +# def get_test_fname(fname): +# """Returns test data filename""" +# path, name = os.path.split(__file__) +# full_path = os.path.join(path, "test", fname) +# return full_path + + class TestParameterValidation(BaseParameterTestCase): def test_simple_ExpressionValidator(self): p = self._parameter_for( @@ -62,3 +71,371 @@ def test_ExpressionValidator_message(self): ValueError, r"Validator 'value.lower\(\) == \"foo\"' could not be evaluated on '1'" ): p.validate(1) + + def test_NoOptionsValidator(self): + p = self._parameter_for( + xml=""" + + +""" + ) + p.validate("foo") + with self.assertRaisesRegex(ValueError, "Parameter index: No options available for selection"): + p.validate(None) + + p = self._parameter_for( + xml=""" + + + +""" + ) + with self.assertRaisesRegex(ValueError, "Parameter index: Options available for selection"): + p.validate("foo") + p.validate(None) + + def test_EmptyTextfieldValidator(self): + p = self._parameter_for( + xml=""" + + +""" + ) + p.validate("foo") + with self.assertRaisesRegex(ValueError, "Parameter blah: Field requires a value"): + p.validate("") + + p = self._parameter_for( + xml=""" + + +""" + ) + with self.assertRaisesRegex(ValueError, "Parameter blah: Field must not set a value"): + p.validate("foo") + p.validate("") + + def test_RegexValidator(self): + p = self._parameter_for( + xml=""" + + [Ff]oo +""" + ) + p.validate("Foo") + p.validate("foo") + with self.assertRaisesRegex( + ValueError, "Parameter blah: Value 'Fop' does not match regular expression '\[Ff\]oo'" + ): + p.validate("Fop") + + # test also valitation of lists (for select parameters) + p.validate(["Foo", "foo"]) + with self.assertRaisesRegex( + ValueError, "Parameter blah: Value 'Fop' does not match regular expression '\[Ff\]oo'" + ): + p.validate(["Foo", "Fop"]) + + p = self._parameter_for( + xml=""" + + [Ff]oo +""" + ) + with self.assertRaisesRegex( + ValueError, r"Parameter blah: Value 'Foo' does match regular expression '\[Ff\]oo'" + ): + p.validate("Foo") + with self.assertRaisesRegex( + ValueError, r"Parameter blah: Value 'foo' does match regular expression '\[Ff\]oo'" + ): + p.validate("foo") + p.validate("Fop") + with self.assertRaisesRegex( + ValueError, r"Parameter blah: Value 'foo' does match regular expression '\[Ff\]oo'" + ): + p.validate(["Fop", "foo"]) + p.validate(["Fop", "fop"]) + + def test_LengthValidator(self): + p = self._parameter_for( + xml=""" + + +""" + ) + p.validate("foo") + p.validate("bar") + with self.assertRaisesRegex(ValueError, "Parameter blah: Must have length of at least 2 and at most 8"): + p.validate("f") + with self.assertRaisesRegex(ValueError, "Parameter blah: Must have length of at least 2 and at most 8"): + p.validate("foobarbaz") + + p = self._parameter_for( + xml=""" + + +""" + ) + with self.assertRaisesRegex(ValueError, "Parameter blah: Must not have length of at least 2 and at most 8"): + p.validate("foo") + with self.assertRaisesRegex(ValueError, "Parameter blah: Must not have length of at least 2 and at most 8"): + 
p.validate("bar") + p.validate("f") + p.validate("foobarbaz") + + def test_InRangeValidator(self): + p = self._parameter_for( + xml=""" + + +""" + ) + with self.assertRaisesRegex(ValueError, "Parameter blah: Doh!! 10 not in range"): + p.validate(10) + p.validate(15) + p.validate(20) + with self.assertRaisesRegex(ValueError, "Parameter blah: Doh!! 21 not in range"): + p.validate(21) + + p = self._parameter_for( + xml=""" + + +""" + ) + p.validate(10) + with self.assertRaisesRegex( + ValueError, r"Parameter blah: Value \('15'\) must not fulfill float\('10'\) < value <= float\('20'\)" + ): + p.validate(15) + with self.assertRaisesRegex( + ValueError, r"Parameter blah: Value \('20'\) must not fulfill float\('10'\) < value <= float\('20'\)" + ): + p.validate(20) + p.validate(21) + + def test_DatasetOkValidator(self): + + sa_session = self.app.model.context + hist = History() + with sa_session.begin(): + sa_session.add(hist) + ok_hda = hist.add_dataset( + HistoryDatasetAssociation(id=1, extension="interval", create_dataset=True, sa_session=sa_session) + ) + ok_hda.set_dataset_state(Dataset.states.OK) + notok_hda = hist.add_dataset( + HistoryDatasetAssociation(id=2, extension="interval", create_dataset=True, sa_session=sa_session) + ) + notok_hda.set_dataset_state(Dataset.states.EMPTY) + + p = self._parameter_for( + xml=""" + + +""" + ) + p.validate(ok_hda) + with self.assertRaisesRegex( + ValueError, + "Parameter blah: The selected dataset is still being generated, select another dataset or wait until it is completed", + ): + p.validate(notok_hda) + p = self._parameter_for( + xml=""" + + +""" + ) + with self.assertRaisesRegex(ValueError, "Parameter blah: The selected dataset must not be in state OK"): + p.validate(ok_hda) + p.validate(notok_hda) + + def test_DatasetEmptyValidator(self): + sa_session = self.app.model.context + hist = History() + with sa_session.begin(): + sa_session.add(hist) + empty_dataset = Dataset(external_filename=get_test_fname("empty.txt")) + empty_hda = hist.add_dataset( + HistoryDatasetAssociation(id=1, extension="interval", dataset=empty_dataset, sa_session=sa_session) + ) + full_dataset = Dataset(external_filename=get_test_fname("1.json")) + full_hda = hist.add_dataset( + HistoryDatasetAssociation(id=2, extension="interval", dataset=full_dataset, sa_session=sa_session) + ) + + p = self._parameter_for( + xml=""" + + +""" + ) + p.validate(full_hda) + with self.assertRaisesRegex( + ValueError, "Parameter blah: The selected dataset is empty, this tool expects non-empty files." + ): + p.validate(empty_hda) + + p = self._parameter_for( + xml=""" + + +""" + ) + with self.assertRaisesRegex( + ValueError, "Parameter blah: The selected dataset is non-empty, this tool expects empty files." 
+ ): + p.validate(full_hda) + p.validate(empty_hda) + + def test_DatasetExtraFilesPathEmptyValidator(self): + sa_session = self.app.model.context + hist = History() + with sa_session.begin(): + sa_session.add(hist) + has_extra_hda = hist.add_dataset( + HistoryDatasetAssociation(id=1, extension="interval", create_dataset=True, sa_session=sa_session) + ) + has_extra_hda.dataset.file_size = 10 + has_extra_hda.dataset.total_size = 15 + has_no_extra_hda = hist.add_dataset( + HistoryDatasetAssociation(id=2, extension="interval", create_dataset=True, sa_session=sa_session) + ) + has_no_extra_hda.dataset.file_size = 10 + has_no_extra_hda.dataset.total_size = 10 + + p = self._parameter_for( + xml=""" + + +""" + ) + p.validate(has_extra_hda) + with self.assertRaisesRegex( + ValueError, + "Parameter blah: The selected dataset's extra_files_path directory is empty or does not exist, this tool expects non-empty extra_files_path directories associated with the selected input.", + ): + p.validate(has_no_extra_hda) + + p = self._parameter_for( + xml=""" + + +""" + ) + + with self.assertRaisesRegex( + ValueError, + "Parameter blah: The selected dataset's extra_files_path directory is non-empty or does exist, this tool expects empty extra_files_path directories associated with the selected input.", + ): + p.validate(has_extra_hda) + p.validate(has_no_extra_hda) + + def test_MetadataValidator(self): + sa_session = self.app.model.context + hist = History() + with sa_session.begin(): + sa_session.add(hist) + hda = hist.add_dataset( + HistoryDatasetAssociation( + id=1, + extension="bed", + create_dataset=True, + sa_session=sa_session, + dataset=Dataset(external_filename=get_test_fname("1.bed")), + ) + ) + hda.set_dataset_state(Dataset.states.OK) + hda.set_meta() + hda.metadata.strandCol = hda.metadata.spec["strandCol"].no_value + + param_xml = """ + + +""" + + p = self._parameter_for(xml=param_xml.format(check="nameCol", skip="")) + p.validate(hda) + + p = self._parameter_for(xml=param_xml.format(check="strandCol", skip="")) + with self.assertRaisesRegex( + ValueError, + "Parameter blah: Metadata 'strandCol' missing, click the pencil icon in the history item to edit / save the metadata attributes", + ): + p.validate(hda) + + p = self._parameter_for(xml=param_xml.format(check="", skip="dbkey,comment_lines,column_names,strandCol")) + p.validate(hda) + p = self._parameter_for(xml=param_xml.format(check="", skip="dbkey,comment_lines,column_names,nameCol")) + with self.assertRaisesRegex( + ValueError, + "Parameter blah: Metadata 'strandCol' missing, click the pencil icon in the history item to edit / save the metadata attributes", + ): + p.validate(hda) + + param_xml_negate = """ + + +""" + p = self._parameter_for(xml=param_xml_negate.format(check="strandCol", skip="")) + p.validate(hda) + p = self._parameter_for(xml=param_xml_negate.format(check="nameCol", skip="")) + with self.assertRaisesRegex( + ValueError, + "Parameter blah: At least one of the checked metadata 'nameCol' is set, click the pencil icon in the history item to edit / save the metadata attributes", + ): + p.validate(hda) + + p = self._parameter_for(xml=param_xml_negate.format(check="", skip="dbkey,comment_lines,column_names,nameCol")) + p.validate(hda) + p = self._parameter_for( + xml=param_xml_negate.format(check="", skip="dbkey,comment_lines,column_names,strandCol") + ) + with self.assertRaisesRegex( + ValueError, + "Parameter blah: At least one of the non skipped metadata 'dbkey,comment_lines,column_names,strandCol' is set, click the pencil icon 
in the history item to edit / save the metadata attributes", + ): + p.validate(hda) + + def test_UnspecifiedBuildValidator(self): + sa_session = self.app.model.context + hist = History() + with sa_session.begin(): + sa_session.add(hist) + has_dbkey_hda = hist.add_dataset( + HistoryDatasetAssociation(id=1, extension="interval", create_dataset=True, sa_session=sa_session) + ) + has_dbkey_hda.set_dataset_state(Dataset.states.OK) + has_dbkey_hda.metadata.dbkey = "hg19" + has_no_dbkey_hda = hist.add_dataset( + HistoryDatasetAssociation(id=2, extension="interval", create_dataset=True, sa_session=sa_session) + ) + has_no_dbkey_hda.set_dataset_state(Dataset.states.OK) + + p = self._parameter_for( + xml=""" + + +""" + ) + p.validate(has_dbkey_hda) + with self.assertRaisesRegex( + ValueError, + "Parameter blah: Unspecified genome build, click the pencil icon in the history item to set the genome build", + ): + p.validate(has_no_dbkey_hda) + + p = self._parameter_for( + xml=""" + + +""" + ) + with self.assertRaisesRegex( + ValueError, + "Parameter blah: Specified genome build, click the pencil icon in the history item to remove the genome build", + ): + p.validate(has_dbkey_hda) + p.validate(has_no_dbkey_hda) From 4ca03dc64b6f54c40f529852cba3fc24ca03d850 Mon Sep 17 00:00:00 2001 From: Matthias Bernt Date: Sun, 30 Jul 2023 23:27:23 +0200 Subject: [PATCH 017/334] fix linter issues --- lib/galaxy/tools/parameters/basic.py | 3 +-- lib/galaxy/tools/parameters/validation.py | 1 - test/unit/app/tools/test_parameter_validation.py | 9 ++++----- 3 files changed, 5 insertions(+), 8 deletions(-) diff --git a/lib/galaxy/tools/parameters/basic.py b/lib/galaxy/tools/parameters/basic.py index 4f993d5fde10..c731ddc30472 100644 --- a/lib/galaxy/tools/parameters/basic.py +++ b/lib/galaxy/tools/parameters/basic.py @@ -293,7 +293,7 @@ def validate(self, value, trans=None): try: validator.validate(value, trans) except ValueError as e: - raise ValueError(f"Parameter {self.name}: {e}") from None + raise ValueError(f"Parameter {self.name}: {e}") from None def to_dict(self, trans, other_values=None): """to_dict tool parameter. 
This can be overridden by subclasses.""" @@ -1998,7 +1998,6 @@ def do_validate(v): except ValueError as e: raise ValueError(f"Parameter {self.name}: {e}") from None - dataset_count = 0 if value: if self.multiple: diff --git a/lib/galaxy/tools/parameters/validation.py b/lib/galaxy/tools/parameters/validation.py index 4240bb2fb3c4..081284e758ce 100644 --- a/lib/galaxy/tools/parameters/validation.py +++ b/lib/galaxy/tools/parameters/validation.py @@ -15,7 +15,6 @@ log = logging.getLogger(__name__) - class Validator(abc.ABC): """ A validator checks that a value meets some conditions OR raises ValueError diff --git a/test/unit/app/tools/test_parameter_validation.py b/test/unit/app/tools/test_parameter_validation.py index dcf8d66feefa..059a343fbd5e 100644 --- a/test/unit/app/tools/test_parameter_validation.py +++ b/test/unit/app/tools/test_parameter_validation.py @@ -125,14 +125,14 @@ def test_RegexValidator(self): p.validate("Foo") p.validate("foo") with self.assertRaisesRegex( - ValueError, "Parameter blah: Value 'Fop' does not match regular expression '\[Ff\]oo'" + ValueError, r"Parameter blah: Value 'Fop' does not match regular expression '\[Ff\]oo'" ): p.validate("Fop") # test also valitation of lists (for select parameters) p.validate(["Foo", "foo"]) with self.assertRaisesRegex( - ValueError, "Parameter blah: Value 'Fop' does not match regular expression '\[Ff\]oo'" + ValueError, r"Parameter blah: Value 'Fop' does not match regular expression '\[Ff\]oo'" ): p.validate(["Foo", "Fop"]) @@ -216,7 +216,6 @@ def test_InRangeValidator(self): p.validate(21) def test_DatasetOkValidator(self): - sa_session = self.app.model.context hist = History() with sa_session.begin(): @@ -243,7 +242,7 @@ def test_DatasetOkValidator(self): ): p.validate(notok_hda) p = self._parameter_for( - xml=""" + xml=""" """ @@ -375,7 +374,7 @@ def test_MetadataValidator(self): ): p.validate(hda) - param_xml_negate = """ + param_xml_negate = """ """ From 578049dd84e08e3560e5613348b4aa43a7de572b Mon Sep 17 00:00:00 2001 From: Matthias Bernt Date: Mon, 31 Jul 2023 11:03:11 +0200 Subject: [PATCH 018/334] linter: fix imports --- test/unit/app/tools/test_parameter_validation.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/test/unit/app/tools/test_parameter_validation.py b/test/unit/app/tools/test_parameter_validation.py index 059a343fbd5e..2ecd53c1d001 100644 --- a/test/unit/app/tools/test_parameter_validation.py +++ b/test/unit/app/tools/test_parameter_validation.py @@ -1,8 +1,11 @@ from galaxy.datatypes.sniff import get_test_fname -from galaxy.model import Dataset, History, HistoryDatasetAssociation +from galaxy.model import ( + Dataset, + History, + HistoryDatasetAssociation, +) from .util import BaseParameterTestCase - # def get_test_fname(fname): # """Returns test data filename""" # path, name = os.path.split(__file__) From 652751d11d5a70f5bb8858fb99f54eb50db84afe Mon Sep 17 00:00:00 2001 From: Yvan Le Bras Date: Wed, 20 Sep 2023 18:23:14 +0200 Subject: [PATCH 019/334] Update datatypes_conf.xml.sample with docx type --- lib/galaxy/config/sample/datatypes_conf.xml.sample | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/galaxy/config/sample/datatypes_conf.xml.sample b/lib/galaxy/config/sample/datatypes_conf.xml.sample index 189bb83a95d9..07f6e8aeb910 100644 --- a/lib/galaxy/config/sample/datatypes_conf.xml.sample +++ b/lib/galaxy/config/sample/datatypes_conf.xml.sample @@ -567,6 +567,8 @@ + + From 12ce9f147823dd3d3b30fd132abe35a95cc2c414 Mon Sep 17 00:00:00 2001 From: Yvan Le Bras Date: Wed, 
20 Sep 2023 20:45:47 +0200 Subject: [PATCH 020/334] Update binary.py add Docx binary type --- lib/galaxy/datatypes/binary.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/lib/galaxy/datatypes/binary.py b/lib/galaxy/datatypes/binary.py index 022631195215..49e22a7d114e 100644 --- a/lib/galaxy/datatypes/binary.py +++ b/lib/galaxy/datatypes/binary.py @@ -4540,3 +4540,11 @@ def display_peek(self, dataset: DatasetProtocol) -> str: return dataset.peek except Exception: return f"Binary FITS file size ({nice_size(dataset.get_size())})" + +class Docx(Binary): + """ + Docx file. + Docx files are generated and used by Microsoft word software as a format to store text informations. + """ + + file_ext = "docx" From 7d0dfdf7dba0a1eaed8f25405d1ff26892affc92 Mon Sep 17 00:00:00 2001 From: Yvan Le Bras Date: Wed, 20 Sep 2023 20:46:25 +0200 Subject: [PATCH 021/334] Update datatypes_conf.xml.sample --- lib/galaxy/config/sample/datatypes_conf.xml.sample | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/galaxy/config/sample/datatypes_conf.xml.sample b/lib/galaxy/config/sample/datatypes_conf.xml.sample index 07f6e8aeb910..69dae957ddad 100644 --- a/lib/galaxy/config/sample/datatypes_conf.xml.sample +++ b/lib/galaxy/config/sample/datatypes_conf.xml.sample @@ -568,7 +568,7 @@ - + From 7174abc21fbe28c09cae6dc0684f706fd7d54dc4 Mon Sep 17 00:00:00 2001 From: Yvan Le Bras Date: Thu, 21 Sep 2023 12:30:22 +0200 Subject: [PATCH 022/334] Update datatypes_conf.xml.sample --- lib/galaxy/config/sample/datatypes_conf.xml.sample | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/galaxy/config/sample/datatypes_conf.xml.sample b/lib/galaxy/config/sample/datatypes_conf.xml.sample index 69dae957ddad..76e1059b8c6f 100644 --- a/lib/galaxy/config/sample/datatypes_conf.xml.sample +++ b/lib/galaxy/config/sample/datatypes_conf.xml.sample @@ -568,7 +568,7 @@ - + From 51028aecad5c643c5287b9467285bf6152fa014f Mon Sep 17 00:00:00 2001 From: Yvan Le Bras Date: Thu, 21 Sep 2023 12:30:52 +0200 Subject: [PATCH 023/334] Update binary.py --- lib/galaxy/datatypes/binary.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/lib/galaxy/datatypes/binary.py b/lib/galaxy/datatypes/binary.py index 49e22a7d114e..022631195215 100644 --- a/lib/galaxy/datatypes/binary.py +++ b/lib/galaxy/datatypes/binary.py @@ -4540,11 +4540,3 @@ def display_peek(self, dataset: DatasetProtocol) -> str: return dataset.peek except Exception: return f"Binary FITS file size ({nice_size(dataset.get_size())})" - -class Docx(Binary): - """ - Docx file. - Docx files are generated and used by Microsoft word software as a format to store text informations. 
- """ - - file_ext = "docx" From 1763fe6a6198cb88a58a6ee73b6ed91a23153c8c Mon Sep 17 00:00:00 2001 From: Helena Rasche Date: Mon, 25 Sep 2023 10:48:35 +0200 Subject: [PATCH 024/334] Enable cors, sc1, add for tabular --- lib/galaxy/config/sample/datatypes_conf.xml.sample | 1 + .../datatypes/display_applications/configs/minerva/tabular.xml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/galaxy/config/sample/datatypes_conf.xml.sample b/lib/galaxy/config/sample/datatypes_conf.xml.sample index 20fc50314852..6ea4fcc029c4 100644 --- a/lib/galaxy/config/sample/datatypes_conf.xml.sample +++ b/lib/galaxy/config/sample/datatypes_conf.xml.sample @@ -434,6 +434,7 @@ + diff --git a/lib/galaxy/datatypes/display_applications/configs/minerva/tabular.xml b/lib/galaxy/datatypes/display_applications/configs/minerva/tabular.xml index dfd461ed9678..bda8d736cb2f 100644 --- a/lib/galaxy/datatypes/display_applications/configs/minerva/tabular.xml +++ b/lib/galaxy/datatypes/display_applications/configs/minerva/tabular.xml @@ -2,6 +2,6 @@ https://covid19map.elixir-luxembourg.org/minerva/?plugins=3ca603e96a3412d52201ab76d5377ae3&datasource=${data.url} ${ $dataset.dbkey == "wuhCor1" or $dataset.dbkey == "NC_045512" } - + From da8ce00d88503ff6de95cf5b0ba8a6e6d43d0ca3 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Wed, 28 Jun 2023 09:09:31 -0400 Subject: [PATCH 025/334] Cleanup docs in admin_cleanup_datasets.py. --- scripts/cleanup_datasets/admin_cleanup_datasets.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/cleanup_datasets/admin_cleanup_datasets.py b/scripts/cleanup_datasets/admin_cleanup_datasets.py index cc24664eb011..f9050c58e87e 100755 --- a/scripts/cleanup_datasets/admin_cleanup_datasets.py +++ b/scripts/cleanup_datasets/admin_cleanup_datasets.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ Mark datasets as deleted that are older than specified cutoff -and (optionaly) with a tool_id that matches the specified search +and (optionally) with a tool_id that matches the specified search string. This script is useful for administrators to cleanup after users who @@ -121,10 +121,10 @@ def main(): default=False, ) parser.add_argument( - "--smtp", default=None, help="SMTP Server to use to send email. " "Default: [read from galaxy ini file]" + "--smtp", default=None, help="SMTP Server to use to send email. " "Default: [read from galaxy config file]" ) parser.add_argument( - "--fromaddr", default=None, help="From address to use to send email. " "Default: [read from galaxy ini file]" + "--fromaddr", default=None, help="From address to use to send email. " "Default: [read from galaxy config file]" ) populate_config_args(parser) From 360b327de6616ba774cd473c99c6a7e78013cf45 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Thu, 29 Jun 2023 10:24:39 -0400 Subject: [PATCH 026/334] Augment pgcleanup to allow periodically deleting old datasets. ... and restricting operations to specific object store ids. 
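A condensed sketch of how the optional restriction threads into each action's
SQL, mirroring the pattern in the diff below (the store id and the template are
illustrative):

    # Build an optional AND-clause once, so the SQL templates can always
    # interpolate {object_store_id_sql}, whether or not a store id was given.
    def object_store_clause(object_store_id=None):
        if object_store_id:
            return f" AND dataset.object_store_id = '{object_store_id}'"
        return ""

    template = "WHERE NOT deleted{object_store_id_sql}"
    print(template.format(object_store_id_sql=object_store_clause("files1")))
    # -> WHERE NOT deleted AND dataset.object_store_id = 'files1'
    print(template.format(object_store_id_sql=object_store_clause()))
    # -> WHERE NOT deleted

Note that the id is interpolated into the SQL directly rather than bound as a
query parameter; that is workable for an admin-run maintenance script, but it
assumes a trusted command-line value.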
--- lib/galaxy_test/base/populators.py | 22 ++ scripts/cleanup_datasets/pgcleanup.py | 76 ++++++- test/integration/test_scripts.py | 90 ++++---- test/integration/test_scripts_pgcleanup.py | 234 +++++++++++++++++++++ 4 files changed, 364 insertions(+), 58 deletions(-) create mode 100644 test/integration/test_scripts_pgcleanup.py diff --git a/lib/galaxy_test/base/populators.py b/lib/galaxy_test/base/populators.py index cd6d72f9388b..48631597179a 100644 --- a/lib/galaxy_test/base/populators.py +++ b/lib/galaxy_test/base/populators.py @@ -1022,6 +1022,28 @@ def run_collection_creates_list(self, history_id: str, hdca_id: str) -> Response self.wait_for_history(history_id, assert_ok=True) return self.run_tool("collection_creates_list", inputs, history_id) + def new_error_dataset(self, history_id: str) -> str: + payload = self.run_tool_payload( + tool_id="test_data_source", + inputs={ + "URL": f"file://{os.path.join(os.getcwd(), 'README.rst')}", + "URL_method": "get", + "data_type": "bed", + }, + history_id=history_id, + ) + create_response = self._post("tools", data=payload) + api_asserts.assert_status_code_is(create_response, 200) + create_object = create_response.json() + api_asserts.assert_has_keys(create_object, "outputs") + assert len(create_object["outputs"]) == 1 + output = create_object["outputs"][0] + self.wait_for_history(history_id, assert_ok=False) + # wait=False to allow errors + output_details = self.get_history_dataset_details(history_id, dataset=output, wait=False) + assert output_details["state"] == "error", output_details + return output_details["id"] + def run_exit_code_from_file(self, history_id: str, hdca_id: str) -> dict: exit_code_inputs = { "input": {"batch": True, "values": [{"src": "hdca", "id": hdca_id}]}, diff --git a/scripts/cleanup_datasets/pgcleanup.py b/scripts/cleanup_datasets/pgcleanup.py index ff9486304583..0c7f01bd9bee 100755 --- a/scripts/cleanup_datasets/pgcleanup.py +++ b/scripts/cleanup_datasets/pgcleanup.py @@ -111,6 +111,10 @@ def __init__(self, app): self._debug = app.args.debug self._update_time = app.args.update_time self._force_retry = app.args.force_retry + if app.args.object_store_id: + self._object_store_id_sql = f" AND dataset.object_store_id = '{app.args.object_store_id}'" + else: + self._object_store_id_sql = "" self._epoch_time = str(int(time.time())) self._days = app.args.days self._config = app.config @@ -200,6 +204,7 @@ def sql(self): update_time_sql=self._update_time_sql, force_retry_sql=self._force_retry_sql, epoch_time=self._epoch_time, + object_store_id_sql=self._object_store_id_sql, ) @property @@ -359,6 +364,7 @@ def sql(self): update_time_sql=self._update_time_sql, force_retry_sql=self._force_retry_sql, epoch_time=self._epoch_time, + object_store_id_sql=self._object_store_id_sql, ) @@ -844,17 +850,68 @@ class PurgeDeletedHDAs(PurgesHDAs, RemovesMetadataFiles, RequiresDiskUsageRecalc ) +class PurgeOldHDAs(PurgesHDAs, RemovesMetadataFiles, RequiresDiskUsageRecalculation, Action): + """ + - Mark purged all HistoryDatasetAssociations that are older than the specified number of days. + - Mark deleted all MetadataFiles whose hda_id is purged in this step. + - Mark deleted all ImplicitlyConvertedDatasetAssociations whose hda_parent_id is purged in this + step. + - Mark purged all HistoryDatasetAssociations for which an ImplicitlyConvertedDatasetAssociation + with matching hda_id is deleted in this step. 
+ """ + + force_retry_sql = " AND NOT history_dataset_association.purged" + _action_sql = """ + WITH purged_hda_ids + AS ( UPDATE history_dataset_association + SET purged = true, deleted = true{update_time_sql} + FROM dataset + WHERE history_dataset_association.dataset_id = dataset.id AND + dataset.create_time < (NOW() AT TIME ZONE 'utc' - interval '%(days)s days') + {force_retry_sql} {object_store_id_sql} + RETURNING history_dataset_association.id, + history_id), + hda_events + AS (INSERT INTO cleanup_event_hda_association + (create_time, cleanup_event_id, hda_id) + SELECT NOW() AT TIME ZONE 'utc', %(event_id)s, id + FROM purged_hda_ids), + {purge_hda_dependencies_sql} + SELECT purged_hda_ids.id AS purged_hda_id, + history.user_id AS recalculate_disk_usage_user_id, + deleted_metadata_file_ids.id AS deleted_metadata_file_id, + deleted_metadata_file_ids.uuid AS deleted_metadata_file_uuid, + deleted_metadata_file_ids.object_store_id AS object_store_id, + deleted_icda_ids.id AS deleted_icda_id, + deleted_icda_ids.hda_id AS deleted_icda_hda_id + FROM purged_hda_ids + LEFT OUTER JOIN history + ON purged_hda_ids.history_id = history.id + LEFT OUTER JOIN deleted_metadata_file_ids + ON deleted_metadata_file_ids.hda_id = purged_hda_ids.id + LEFT OUTER JOIN deleted_icda_ids + ON deleted_icda_ids.hda_parent_id = purged_hda_ids.id + ORDER BY purged_hda_ids.id + """ + causals = ( + ("purged_hda_id", "deleted_metadata_file_id", "object_store_id"), + ("purged_hda_id", "deleted_icda_id", "deleted_icda_hda_id"), + ) + + class PurgeHistorylessHDAs(PurgesHDAs, RemovesMetadataFiles, RequiresDiskUsageRecalculation, Action): """ - Mark purged all HistoryDatasetAssociations whose history_id is null. """ + force_retry_sql = " AND NOT history_dataset_association.purged" _action_sql = """ WITH purged_hda_ids AS ( UPDATE history_dataset_association SET purged = true, deleted = true{update_time_sql} - WHERE history_id IS NULL{force_retry_sql} - AND update_time < (NOW() AT TIME ZONE 'utc' - interval '%(days)s days') + FROM dataset + WHERE history_id IS NULL{force_retry_sql}{object_store_id_sql} + AND history_dataset_association.update_time < (NOW() AT TIME ZONE 'utc' - interval '%(days)s days') RETURNING id), hda_events AS (INSERT INTO cleanup_event_hda_association @@ -893,7 +950,7 @@ class PurgeErrorHDAs(PurgesHDAs, RemovesMetadataFiles, RequiresDiskUsageRecalcul AS ( UPDATE history_dataset_association SET purged = true, deleted = true{update_time_sql} FROM dataset - WHERE history_dataset_association.dataset_id = dataset.id{force_retry_sql} + WHERE history_dataset_association.dataset_id = dataset.id{force_retry_sql}{object_store_id_sql} AND dataset.state = 'error' AND history_dataset_association.update_time < (NOW() AT TIME ZONE 'utc' - interval '%(days)s days') RETURNING history_dataset_association.id as id, @@ -1037,7 +1094,7 @@ class DeleteExportedHistories(Action): SET deleted = true{update_time_sql} FROM job_export_history_archive WHERE job_export_history_archive.dataset_id = dataset.id - AND NOT deleted + AND NOT deleted {object_store_id_sql} AND dataset.update_time <= (NOW() AT TIME ZONE 'utc' - interval '%(days)s days') RETURNING dataset.id), dataset_events @@ -1063,7 +1120,7 @@ class DeleteDatasets(Action): WITH deleted_dataset_ids AS ( UPDATE dataset SET deleted = true{update_time_sql} - WHERE NOT deleted + WHERE NOT deleted {object_store_id_sql} AND NOT EXISTS (SELECT true FROM library_dataset_dataset_association @@ -1097,7 +1154,7 @@ class PurgeDatasets(RemovesDatasets, Action): WITH purged_dataset_ids AS 
( UPDATE dataset SET purged = true{update_time_sql} - WHERE deleted{force_retry_sql} + WHERE deleted{force_retry_sql}{object_store_id_sql} AND update_time < (NOW() AT TIME ZONE 'utc' - interval '%(days)s days') RETURNING id, uuid, @@ -1182,6 +1239,13 @@ def __parse_args(self): default=14, help="Only perform action(s) on objects that have not been updated since the specified number of days", ) + parser.add_argument( + "--object-store-id", + dest="object_store_id", + type=str, + default=None, + help="Only perform action(s) on objects stored in the target object store (for dataset operations - ignored by user/history centric operations)", + ) parser.add_argument( "-U", "--no-update-time", diff --git a/test/integration/test_scripts.py b/test/integration/test_scripts.py index 832a4e9bb8c4..d19bbe1b714e 100644 --- a/test/integration/test_scripts.py +++ b/test/integration/test_scripts.py @@ -16,7 +16,9 @@ from galaxy_test.driver import integration_util -class TestScriptsIntegration(integration_util.IntegrationTestCase): +class BaseScriptsIntegrationTestCase(integration_util.IntegrationTestCase): + dataset_populator: DatasetPopulator + def setUp(self): super().setUp() self.dataset_populator = DatasetPopulator(self.galaxy_interactor) @@ -26,6 +28,39 @@ def setUp(self): def handle_galaxy_config_kwds(cls, config): cls._raw_config = config + def _scripts_check_argparse_help(self, script): + # Test imports and argparse response to --help with 0 exit code. + output = self._scripts_check_output(script, ["--help"]) + # Test -h, --help in printed output message. + assert "-h, --help" in output + + def _scripts_check_output(self, script, args): + cwd = galaxy_directory() + cmd = ["python", os.path.join(cwd, "scripts", script)] + args + clean_env = { + "PATH": os.environ.get("PATH", None), + } # Don't let testing environment variables interfere with config. 
+ try: + return unicodify(subprocess.check_output(cmd, cwd=cwd, env=clean_env)) + except Exception as e: + if isinstance(e, subprocess.CalledProcessError): + raise Exception(f"{unicodify(e)}\nOutput was:\n{unicodify(e.output)}") + raise + + def write_config_file(self): + config_dir = self.config_dir + path = os.path.join(config_dir, "galaxy.yml") + self._test_driver.temp_directories.extend([config_dir]) + config = self._raw_config + # Update config dict with database_connection, which might be set through env variables + config["database_connection"] = self._app.config.database_connection + with open(path, "w") as f: + yaml.dump({"galaxy": config}, f) + + return path + + +class TestScriptsIntegration(BaseScriptsIntegrationTestCase): def test_helper(self): script = "helper.py" self._scripts_check_argparse_help(script) @@ -52,25 +87,10 @@ def test_cleanup(self): assert history_response.status_code == 200 assert history_response.json()["purged"] is True, history_response.json() - def test_pgcleanup(self): - self._skip_unless_postgres() - - script = "cleanup_datasets/pgcleanup.py" + def test_admin_cleanup_datasets(self): + script = "cleanup_datasets/admin_cleanup_datasets.py" self._scripts_check_argparse_help(script) - history_id = self.dataset_populator.new_history() - delete_response = self.dataset_populator._delete(f"histories/{history_id}") - assert delete_response.status_code == 200 - assert delete_response.json()["purged"] is False - config_file = self.write_config_file() - output = self._scripts_check_output( - script, ["-c", config_file, "--older-than", "0", "--sequence", "purge_deleted_histories"] - ) - print(output) - history_response = self.dataset_populator._get(f"histories/{history_id}") - assert history_response.status_code == 200 - assert history_response.json()["purged"] is True, history_response.json() - def test_set_user_disk_usage(self): script = "set_user_disk_usage.py" self._scripts_check_argparse_help(script) @@ -123,9 +143,6 @@ def test_grt_export(self): export = json.load(f) assert export["version"] == 3 - def test_admin_cleanup_datasets(self): - self._scripts_check_argparse_help("cleanup_datasets/admin_cleanup_datasets.py") - def test_secret_decoder_ring(self): script = "secret_decoder_ring.py" self._scripts_check_argparse_help(script) @@ -143,34 +160,3 @@ def test_galaxy_main(self): def test_runtime_stats(self): self._skip_unless_postgres() self._scripts_check_argparse_help("runtime_stats.py") - - def _scripts_check_argparse_help(self, script): - # Test imports and argparse response to --help with 0 exit code. - output = self._scripts_check_output(script, ["--help"]) - # Test -h, --help in printed output message. - assert "-h, --help" in output - - def _scripts_check_output(self, script, args): - cwd = galaxy_directory() - cmd = ["python", os.path.join(cwd, "scripts", script)] + args - clean_env = { - "PATH": os.environ.get("PATH", None), - } # Don't let testing environment variables interfere with config. 
- try: - return unicodify(subprocess.check_output(cmd, cwd=cwd, env=clean_env)) - except Exception as e: - if isinstance(e, subprocess.CalledProcessError): - raise Exception(f"{unicodify(e)}\nOutput was:\n{unicodify(e.output)}") - raise - - def write_config_file(self): - config_dir = self.config_dir - path = os.path.join(config_dir, "galaxy.yml") - self._test_driver.temp_directories.extend([config_dir]) - config = self._raw_config - # Update config dict with database_connection, which might be set through env variables - config["database_connection"] = self._app.config.database_connection - with open(path, "w") as f: - yaml.dump({"galaxy": config}, f) - - return path diff --git a/test/integration/test_scripts_pgcleanup.py b/test/integration/test_scripts_pgcleanup.py new file mode 100644 index 000000000000..4ecdc9b18abf --- /dev/null +++ b/test/integration/test_scripts_pgcleanup.py @@ -0,0 +1,234 @@ +from typing import List + +from galaxy_test.base.populators import skip_without_tool +from .test_scripts import BaseScriptsIntegrationTestCase + +SCRIPT = "cleanup_datasets/pgcleanup.py" + + +class TestScriptsPgCleanupIntegration(BaseScriptsIntegrationTestCase): + def test_help(self): + self._skip_unless_postgres() + self._scripts_check_argparse_help(SCRIPT) + + def test_purge_deleted_histories(self): + self._skip_unless_postgres() + + history_id = self.dataset_populator.new_history() + delete_response = self.dataset_populator._delete(f"histories/{history_id}") + assert delete_response.status_code == 200 + assert delete_response.json()["purged"] is False + self._pgcleanup_check_output(["--older-than", "0", "--sequence", "purge_deleted_histories"]) + history_response = self.dataset_populator._get(f"histories/{history_id}") + assert history_response.status_code == 200 + assert history_response.json()["purged"] is True, history_response.json() + + def test_purge_old_hdas(self): + self._skip_unless_postgres() + + history_id = self.dataset_populator.new_history() + hda = self.dataset_populator.new_dataset(history_id, wait=True) + assert not self.is_purged(history_id, hda) + + # filtering on a date too old - shouldn't purge the dataset + self._pgcleanup_check_output( + [ + "--older-than", + "1", + "--sequence", + "purge_old_hdas", + ] + ) + assert not self.is_purged(history_id, hda) + + # filtering on invalid object store - shouldn't purge the dataset + self._pgcleanup_check_output( + [ + "--older-than", + "0", + "--object-store-id", + "myfakeobjectstore", + "--sequence", + "purge_old_hdas", + ] + ) + assert not self.is_purged(history_id, hda) + + self._pgcleanup_check_output(["--older-than", "0", "--sequence", "purge_old_hdas"]) + + assert self.is_purged(history_id, hda) + + @skip_without_tool("test_data_source") + def test_purge_errored_hdas(self): + history_id = self.dataset_populator.new_history() + error_dataset = self.dataset_populator.new_error_dataset(history_id) + assert not self.is_purged(history_id, error_dataset) + + # dataset not old enough, shouldn't be purged + self._pgcleanup_check_output( + [ + "--older-than", + "1", + "--sequence", + "purge_error_hdas", + ] + ) + assert not self.is_purged(history_id, error_dataset) + + # dataset not in target object store, shouldn't be purged + self._pgcleanup_check_output( + [ + "--older-than", + "0", + "--object-store-id", + "myfakeobjectstore", + "--sequence", + "purge_error_hdas", + ] + ) + assert not self.is_purged(history_id, error_dataset) + + # okay though, this should purge the dataset + self._pgcleanup_check_output( + [ + "--older-than", + 
"0", + "--sequence", + "purge_error_hdas", + ] + ) + assert self.is_purged(history_id, error_dataset) + + def test_purge_datasets(self): + self._skip_unless_postgres() + + history_id = self.dataset_populator.new_history() + hda = self.dataset_populator.new_dataset(history_id, wait=True) + self.dataset_populator.delete_dataset(history_id, hda["id"]) + assert not self.is_purged(history_id, hda) + + self._pgcleanup_check_output( + [ + "--older-than", + "1", + "--sequence", + "purge_datasets", + ] + ) + assert not self.is_purged(history_id, hda) + + self._pgcleanup_check_output( + [ + "--older-than", + "0", + "--object-store-id", + "myfakeobjectstore", + "--sequence", + "purge_datasets", + ] + ) + assert not self.is_purged(history_id, hda) + + self._pgcleanup_check_output( + [ + "--older-than", + "0", + "--sequence", + "purge_datasets", + ] + ) + self._pgcleanup_check_output( + [ + "--older-than", + "0", + "--sequence", + "purge_datasets", + ] + ) + + # why is this not purged? + # test or functionality seem broken but better to run through it and ensure + # it isn't breaking anything and everything is syntactically correct than not + # assert self.is_purged(history_id, hda) + + def test_delete_datasets(self): + # this walks through the code to ensure no SQL or Python errors but + # I think we would need to talk to the model layer from the test directly + # to actually produce datasets of the target type for purging and to verify + # they were purged (certainly a possibility) + self._skip_unless_postgres() + + history_id = self.dataset_populator.new_history() + hda = self.dataset_populator.new_dataset(history_id, wait=True) + + assert not self.is_purged(history_id, hda) + + self._pgcleanup_check_output( + [ + "--older-than", + "0", + "--sequence", + "delete_datasets", + ] + ) + self._pgcleanup_check_output( + [ + "--older-than", + "0", + "--object-store-id", + "myfakeobjectstore", + "--sequence", + "delete_datasets", + ] + ) + + assert not self.is_purged(history_id, hda) + + def test_purge_historyless_hdas(self): + # same as above - this is just a negative test for things being broken + # we could access the model layer to write a test to verify the positive + # behavior actually occurs + self._skip_unless_postgres() + + history_id = self.dataset_populator.new_history() + hda = self.dataset_populator.new_dataset(history_id, wait=True) + + assert not self.is_purged(history_id, hda) + self._pgcleanup_check_output( + [ + "--older-than", + "0", + "--sequence", + "purge_historyless_hdas", + ] + ) + self._pgcleanup_check_output( + [ + "--older-than", + "0", + "--object-store-id", + "myfakeobjectstore", + "--sequence", + "purge_historyless_hdas", + ] + ) + + assert not self.is_purged(history_id, hda) + + def is_purged(self, history_id: str, dataset) -> bool: + # set wait=False to prevent errored dataset from erroring out + if isinstance(dataset, str): + details_response = self.dataset_populator.get_history_dataset_details( + history_id, dataset_id=dataset, wait=False + ) + else: + details_response = self.dataset_populator.get_history_dataset_details( + history_id, dataset=dataset, wait=False + ) + return details_response["purged"] + + def _pgcleanup_check_output(self, extra_args: List[str]) -> str: + config_file = self.write_config_file() + output = self._scripts_check_output(SCRIPT, ["-c", config_file] + extra_args) + print(output) + return output From 55bdf14aa23a25186b8bdb830eb1c56ed0bf6898 Mon Sep 17 00:00:00 2001 From: Assunta DeSanto Date: Thu, 28 Sep 2023 11:53:24 -0400 Subject: [PATCH 027/334] 
converting ISO date time to UTC string --- client/src/components/Markdown/Elements/InvocationTime.vue | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/client/src/components/Markdown/Elements/InvocationTime.vue b/client/src/components/Markdown/Elements/InvocationTime.vue index 63355884e82d..9b59273df24f 100644 --- a/client/src/components/Markdown/Elements/InvocationTime.vue +++ b/client/src/components/Markdown/Elements/InvocationTime.vue @@ -19,7 +19,8 @@ export default { computed: { content() { const invocation = this.invocations[this.args.invocation_id]; - return invocation && invocation["create_time"]; + const iso = new Date(invocation && invocation["create_time"]); + return iso.toUTCString(); }, }, }; From 62c084f5046c772eef71469ee0ead8108f4f3490 Mon Sep 17 00:00:00 2001 From: Assunta DeSanto Date: Fri, 29 Sep 2023 15:27:21 -0400 Subject: [PATCH 028/334] converting date on PDF generated report to a more readable string time instead of iso --- lib/galaxy/managers/markdown_util.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/managers/markdown_util.py b/lib/galaxy/managers/markdown_util.py index 4e5219251ccc..0aaf1f50c3d8 100644 --- a/lib/galaxy/managers/markdown_util.py +++ b/lib/galaxy/managers/markdown_util.py @@ -347,7 +347,7 @@ def handle_generate_time(self, line, generate_time): pass def handle_invocation_time(self, line, invocation): - self.ensure_rendering_data_for("invocations", invocation)["create_time"] = invocation.create_time.isoformat() + self.ensure_rendering_data_for("invocations", invocation)["create_time"] = invocation.create_time.strftime("%m/%d/%Y, %H:%M:%S") def handle_dataset_type(self, line, hda): self.extend_history_dataset_rendering_data(hda, "ext", hda.ext, "*Unknown dataset type*") @@ -547,7 +547,7 @@ def handle_generate_time(self, line, generate_time): return (content, True) def handle_invocation_time(self, line, invocation): - content = literal_via_fence(invocation.create_time.isoformat()) + content = literal_via_fence(invocation.create_time.strftime("%m/%d/%Y, %H:%M:%S")) return (content, True) def handle_dataset_name(self, line, hda): From 4603b0ed65416a930548055e5c16a9384833a03b Mon Sep 17 00:00:00 2001 From: Yvan Le Bras Date: Mon, 2 Oct 2023 11:23:19 +0200 Subject: [PATCH 029/334] Update lib/galaxy/config/sample/datatypes_conf.xml.sample MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Björn Grüning --- lib/galaxy/config/sample/datatypes_conf.xml.sample | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lib/galaxy/config/sample/datatypes_conf.xml.sample b/lib/galaxy/config/sample/datatypes_conf.xml.sample index 76e1059b8c6f..143e48a96aa0 100644 --- a/lib/galaxy/config/sample/datatypes_conf.xml.sample +++ b/lib/galaxy/config/sample/datatypes_conf.xml.sample @@ -567,8 +567,7 @@ - - + From 05e1775291028bafcf6ae2a99606d1df1f9dd1a0 Mon Sep 17 00:00:00 2001 From: Assunta DeSanto Date: Mon, 2 Oct 2023 10:10:26 -0400 Subject: [PATCH 030/334] run make format --- lib/galaxy/managers/markdown_util.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/galaxy/managers/markdown_util.py b/lib/galaxy/managers/markdown_util.py index 0aaf1f50c3d8..40787b321693 100644 --- a/lib/galaxy/managers/markdown_util.py +++ b/lib/galaxy/managers/markdown_util.py @@ -347,7 +347,9 @@ def handle_generate_time(self, line, generate_time): pass def handle_invocation_time(self, line, invocation): - self.ensure_rendering_data_for("invocations", 
invocation)["create_time"] = invocation.create_time.strftime("%m/%d/%Y, %H:%M:%S") + self.ensure_rendering_data_for("invocations", invocation)["create_time"] = invocation.create_time.strftime( + "%m/%d/%Y, %H:%M:%S" + ) def handle_dataset_type(self, line, hda): self.extend_history_dataset_rendering_data(hda, "ext", hda.ext, "*Unknown dataset type*") From 308df0b14639abd1922298871e5587238ae6e9be Mon Sep 17 00:00:00 2001 From: Assunta DeSanto Date: Mon, 2 Oct 2023 10:10:47 -0400 Subject: [PATCH 031/334] update tests to use string UTC format instead of iso --- test/unit/app/managers/test_markdown_export.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/test/unit/app/managers/test_markdown_export.py b/test/unit/app/managers/test_markdown_export.py index 64c4df408d47..705ffd51bf86 100644 --- a/test/unit/app/managers/test_markdown_export.py +++ b/test/unit/app/managers/test_markdown_export.py @@ -280,7 +280,8 @@ def test_generate_invocation_time(self): invocation = self._new_invocation() self.app.workflow_manager.get_invocation.side_effect = [invocation] # type: ignore[attr-defined,union-attr] result = self._to_basic(example) - assert f"\n {invocation.create_time.isoformat()}" in result + expectedtime = invocation.create_time.strftime("%m/%d/%Y, %H:%M:%S") + assert f"\n {expectedtime}" in result def test_job_parameters(self): job = model.Job() @@ -412,7 +413,7 @@ def test_get_invocation_time(self): result, extra_data = self._ready_export(example) assert "invocations" in extra_data assert "create_time" in extra_data["invocations"]["be8be0fd2ce547f6"] - assert extra_data["invocations"]["be8be0fd2ce547f6"]["create_time"] == invocation.create_time.isoformat() + assert extra_data["invocations"]["be8be0fd2ce547f6"]["create_time"] == invocation.create_time.strftime("%m/%d/%Y, %H:%M:%S") def _ready_export(self, example): return ready_galaxy_markdown_for_export(self.trans, example) From 78b454679d1a9c5803df1baf02922036bde99c05 Mon Sep 17 00:00:00 2001 From: Assunta DeSanto Date: Mon, 2 Oct 2023 11:44:40 -0400 Subject: [PATCH 032/334] fixing formatting --- test/unit/app/managers/test_markdown_export.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/test/unit/app/managers/test_markdown_export.py b/test/unit/app/managers/test_markdown_export.py index 705ffd51bf86..db3f59e7f5dc 100644 --- a/test/unit/app/managers/test_markdown_export.py +++ b/test/unit/app/managers/test_markdown_export.py @@ -413,7 +413,9 @@ def test_get_invocation_time(self): result, extra_data = self._ready_export(example) assert "invocations" in extra_data assert "create_time" in extra_data["invocations"]["be8be0fd2ce547f6"] - assert extra_data["invocations"]["be8be0fd2ce547f6"]["create_time"] == invocation.create_time.strftime("%m/%d/%Y, %H:%M:%S") + assert extra_data["invocations"]["be8be0fd2ce547f6"]["create_time"] == invocation.create_time.strftime( + "%m/%d/%Y, %H:%M:%S" + ) def _ready_export(self, example): return ready_galaxy_markdown_for_export(self.trans, example) From 09dcba76e44ed630caa774d410b99fa9e2212ea1 Mon Sep 17 00:00:00 2001 From: Assunta DeSanto Date: Tue, 10 Oct 2023 11:31:14 -0400 Subject: [PATCH 033/334] Update client/src/components/Markdown/Elements/InvocationTime.vue Co-authored-by: Marius van den Beek --- client/src/components/Markdown/Elements/InvocationTime.vue | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/client/src/components/Markdown/Elements/InvocationTime.vue b/client/src/components/Markdown/Elements/InvocationTime.vue index 
9b59273df24f..b335bb377974 100644 --- a/client/src/components/Markdown/Elements/InvocationTime.vue +++ b/client/src/components/Markdown/Elements/InvocationTime.vue @@ -19,8 +19,7 @@ export default { computed: { content() { const invocation = this.invocations[this.args.invocation_id]; - const iso = new Date(invocation && invocation["create_time"]); - return iso.toUTCString(); + return invocation && new Date(invocation["create_time"]).toUTCString(); }, }, }; From 557b06e67f2db84642926c5526c332f238bcc52c Mon Sep 17 00:00:00 2001 From: guerler Date: Sun, 1 Oct 2023 14:04:44 +0300 Subject: [PATCH 034/334] Replace logout modal in user preferences --- .../src/components/User/UserPreferences.vue | 23 +++++++++---------- 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/client/src/components/User/UserPreferences.vue b/client/src/components/User/UserPreferences.vue index 681c0e369fba..d64e60400398 100644 --- a/client/src/components/User/UserPreferences.vue +++ b/client/src/components/User/UserPreferences.vue @@ -106,7 +106,15 @@ icon="fa-sign-out" title="Sign Out" description="Click here to sign out of all sessions." - @click="signOut" /> + @click="toggleLogout = !toggleLogout" /> + + Do you want to continue and sign out of all active sessions? + @@ -166,6 +174,7 @@ export default { messageVariant: null, message: null, toggleActivityBar: false, + toggleLogout: false, toggleTheme: false, }; }, @@ -232,17 +241,7 @@ export default { } }, signOut() { - const Galaxy = getGalaxyInstance(); - Galaxy.modal.show({ - title: _l("Sign out"), - body: "Do you want to continue and sign out of all active sessions?", - buttons: { - Cancel: function () { - Galaxy.modal.hide(); - }, - "Sign out": userLogoutAll, - }, - }); + userLogoutAll(); }, }, }; From 88e24e1f72867cd27e7f1c65ecbb31109dd3aa51 Mon Sep 17 00:00:00 2001 From: guerler Date: Sun, 1 Oct 2023 14:17:09 +0300 Subject: [PATCH 035/334] Replace data privacy modal in user preferences --- .../src/components/User/UserPreferences.vue | 33 ++++++++++--------- 1 file changed, 17 insertions(+), 16 deletions(-) diff --git a/client/src/components/User/UserPreferences.vue b/client/src/components/User/UserPreferences.vue index d64e60400398..327a2c17ff46 100644 --- a/client/src/components/User/UserPreferences.vue +++ b/client/src/components/User/UserPreferences.vue @@ -106,9 +106,17 @@ icon="fa-sign-out" title="Sign Out" description="Click here to sign out of all sessions." - @click="toggleLogout = !toggleLogout" /> + @click="showLogoutModal = true" /> + + + All of your histories and datasets have been made private. If you'd like to make all *future* histories + private please use the + + User Permissions + interface. + 1 ?? false; }, + userPermissionsUrl() { + return withPrefix("/user/permissions"); + }, }, created() { const message = QueryStringParsing.get("message"); @@ -211,7 +223,6 @@ export default { }, methods: { makeDataPrivate() { - const Galaxy = getGalaxyInstance(); if ( confirm( _l( @@ -225,18 +236,8 @@ export default { ) ) ) { - axios.post(withPrefix(`/history/make_private?all_histories=true`)).then((response) => { - Galaxy.modal.show({ - title: _l("Datasets are now private"), - body: `All of your histories and datasets have been made private. 
If you'd like to make all *future* histories private please use the User Permissions interface.`, - buttons: { - Close: () => { - Galaxy.modal.hide(); - }, - }, - }); + axios.post(withPrefix(`/history/make_private?all_histories=true`)).then(() => { + this.showDataPrivateModal = true; }); } }, From 0d02022e6730d6d2cc31f2d741204ccb1f703655 Mon Sep 17 00:00:00 2001 From: Dannon Baker Date: Wed, 11 Oct 2023 11:10:19 -0400 Subject: [PATCH 036/334] Toolshed client removal --- client/src/bundleToolshed.js | 11 -- client/src/style/scss/base.scss | 1 - client/src/style/scss/toolshed.scss | 40 ----- client/src/style/scss/unsorted.scss | 1 - .../src/toolshed/groups/group-detail-view.js | 141 ------------------ client/src/toolshed/groups/group-list-view.js | 130 ---------------- .../src/toolshed/groups/group-listrow-view.js | 35 ----- client/src/toolshed/groups/group-model.js | 20 --- client/src/toolshed/toolshed.groups.js | 43 ------ 9 files changed, 422 deletions(-) delete mode 100644 client/src/bundleToolshed.js delete mode 100644 client/src/style/scss/toolshed.scss delete mode 100644 client/src/toolshed/groups/group-detail-view.js delete mode 100644 client/src/toolshed/groups/group-list-view.js delete mode 100644 client/src/toolshed/groups/group-listrow-view.js delete mode 100644 client/src/toolshed/groups/group-model.js delete mode 100644 client/src/toolshed/toolshed.groups.js diff --git a/client/src/bundleToolshed.js b/client/src/bundleToolshed.js deleted file mode 100644 index c2510b540b17..000000000000 --- a/client/src/bundleToolshed.js +++ /dev/null @@ -1,11 +0,0 @@ -/** - * The toolshed list of globals we expose in window.bundleToolshed used by Toolshed makos. - */ - -/* jquery and _ are exposed via expose-loader while several external plugins rely on these */ -import $ from "jquery"; // eslint-disable-line no-unused-vars -import _ from "underscore"; // eslint-disable-line no-unused-vars - -export { default as LegacyGridView } from "legacy/grid/grid-view"; -export { default as store } from "storemodern"; -export { default as ToolshedGroups } from "toolshed/toolshed.groups"; diff --git a/client/src/style/scss/base.scss b/client/src/style/scss/base.scss index 436e3d963829..0b08c0aa0af1 100644 --- a/client/src/style/scss/base.scss +++ b/client/src/style/scss/base.scss @@ -37,7 +37,6 @@ $fa-font-path: "../../../node_modules/@fortawesome/fontawesome-free/webfonts/"; @import "charts.scss"; @import "message.scss"; @import "markdown.scss"; -@import "toolshed.scss"; @import "multiselect.scss"; @import "icon-btn.scss"; @import "peek-columns.scss"; diff --git a/client/src/style/scss/toolshed.scss b/client/src/style/scss/toolshed.scss deleted file mode 100644 index 72a4c8b68852..000000000000 --- a/client/src/style/scss/toolshed.scss +++ /dev/null @@ -1,40 +0,0 @@ -@import "theme/blue.scss"; -.shed-style-container { - width: 95%; - margin: auto; - margin-top: 1em; - overflow: auto !important; - .header { - h2 { - float: left; - } - span { - float: right; - } - } -} -.ui-autocomplete { - background-color: #fff; - li.ui-menu-item { - list-style-type: none; - } -} - -.masthead-simple { - background-color: $brand-dark; - height: $masthead-height; - .navbar-brand { - position: absolute; - left: 0.5rem; - .navbar-brand-image { - max-height: 2rem; - } - } - .navbar-brand-title { - color: $brand-light; - font-size: 1.1rem; - } - .nav-link { - color: $brand-light !important; - } -} diff --git a/client/src/style/scss/unsorted.scss b/client/src/style/scss/unsorted.scss index adbf5f8679e4..4c3799859c8f 100644 --- 
a/client/src/style/scss/unsorted.scss +++ b/client/src/style/scss/unsorted.scss @@ -932,7 +932,6 @@ div.toolSectionTitle { } /* Toolshed, reports custom styles from mako templates */ -body.toolshed, body.reports { margin: 0; padding: 0; diff --git a/client/src/toolshed/groups/group-detail-view.js b/client/src/toolshed/groups/group-detail-view.js deleted file mode 100644 index bb4836c95056..000000000000 --- a/client/src/toolshed/groups/group-detail-view.js +++ /dev/null @@ -1,141 +0,0 @@ -import Backbone from "backbone"; -import $ from "jquery"; -import mod_group_model from "toolshed/groups/group-model"; -import { Toast } from "ui/toast"; -import _ from "underscore"; - -// toolshed group detail view -const GroupDetailView = Backbone.View.extend({ - el: "#groups_element", - options: {}, - app: null, - - initialize: function (options) { - this.options = _.extend(this.options, options); - this.app = window.globalTS.groups; - - if (this.app.collection !== null) { - this.model = this.app.collection.get(this.options.group_id); - this.render(); - } else { - this.fetchGroup(); - } - }, - - fetchGroup: function (options) { - const that = this; - this.options = _.extend(this.options, options); - this.model = new mod_group_model.Group({ id: this.options.group_id }); - this.model.fetch({ - success: function (model) { - console.log("received data: "); - console.log(model); - that.render(); - }, - error: function (model, response) { - if (typeof response.responseJSON !== "undefined") { - Toast.error(response.responseJSON.err_msg); - } else { - Toast.error("An error occurred."); - } - }, - }); - }, - - render: function () { - const template = this.templateRow(); - this.$el.html(template({ group: this.model })); - $('#center [data-toggle="tooltip"]').tooltip({ trigger: "hover" }); - $("#center").css("overflow", "auto"); - }, - - templateRow: function () { - return _.template( - [ - "
", - '

<%= _.escape(group.get("name")) %>

', - '

', - 'A group of <%= group.get("members").length %> members with <%= group.get("repositories").length %> repositories and a total of <%= group.get("total_downloads") %> combined repository clones.

', - - "

Members

", - '', - "", - "", - "", - "", - "", - "", - '<% _.each(group.get("members"), function(member) { %>', - "", - "", - "", - "", - "", - "<% }); %>", - "", - "
<th>Name</th><th>Repositories</th><th>Registered</th>
", - "<%= _.escape(member.username) %>", - "", - '<%= member.user_repos_count %>', - "", - "<%= member.time_created %>", - "
", - - "

Repositories

", - '', - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - '<% _.each(group.get("repositories"), function(repo) { %>', - "", - "", - "", - "", - "", - "", - '", - '", - "", - "", - "", - "<% }); %>", - "", - "
<th>Name</th><th>Description</th><th>Clones</th><th>Owner</th><th>Categories</th><th>Created</th><th>Updated</th><th>Rating</th><th>Verified</th>
", - '<%= _.escape(repo.name) %>', - "", - "<%= _.escape(repo.description) %>", - "", - "<%= repo.times_downloaded %>", - "", - "<%= _.escape(repo.owner) %>", - "", - "<% _.each((repo.categories), function(cat) { %>", - '<%= cat.name %>
', - // '<%= repo.categories %>', - "<% }); %>", - "
', - "<%= repo.time_created %>", - "', - "<%= repo.time_updated %>", - "", - "<%= repo.ratings_mean %>", - "", - "<%= repo.approved %>", - "
", - "
", - ].join("") - ); - }, -}); -export default { - GroupDetailView: GroupDetailView, -}; diff --git a/client/src/toolshed/groups/group-list-view.js b/client/src/toolshed/groups/group-list-view.js deleted file mode 100644 index 6d21347f8144..000000000000 --- a/client/src/toolshed/groups/group-list-view.js +++ /dev/null @@ -1,130 +0,0 @@ -import Backbone from "backbone"; -import $ from "jquery"; -import mod_group_row from "toolshed/groups/group-listrow-view"; -import mod_group_model from "toolshed/groups/group-model"; -import { Toast } from "ui/toast"; -import _ from "underscore"; - -const GroupListView = Backbone.View.extend({ - el: "#groups_element", - defaults: {}, - - /** - * Initialize and fetch the groups from server. - * Async render afterwards. - * @param {object} options an object with options - */ - initialize: function (options) { - this.options = _.defaults(this.options || {}, this.defaults, options); - const that = this; - window.globalTS.groups.collection = new mod_group_model.Groups(); - window.globalTS.groups.collection.fetch({ - success: function (model) { - that.render(); - }, - error: function (model, response) { - if (typeof response.responseJSON !== "undefined") { - Toast.error(response.responseJSON.err_msg); - } else { - Toast.error("An error occurred."); - } - }, - }); - }, - - fetch: function () {}, - - /** - * Render the groups table from the object's own collection. - */ - render: function (options) { - this.options = _.extend(this.options, options); - $(".tooltip").hide(); - const template = this.templateGroupsList(); - this.$el.html(template({ length: window.globalTS.groups.collection.models.length })); - this.renderRows(window.globalTS.groups.collection.models); - $('#center [data-toggle="tooltip"]').tooltip({ trigger: "hover" }); - $("#center").css("overflow", "auto"); - }, - - /** - * Render all given models as rows in the groups list - * @param {array} groups_to_render array of group models to render - */ - renderRows: function (groups_to_render) { - for (let i = 0; i < groups_to_render.length; i++) { - const group = groups_to_render[i]; - this.renderOne({ group: group }); - } - }, - - /** - * Create a view for the given model and add it to the groups view. - * @param {Group} model of the view that will be rendered - */ - renderOne: function (options) { - const rowView = new mod_group_row.GroupListRowView(options); - this.$el.find("#group_list_body").append(rowView.el); - }, - - /** - * Table heading was clicked, update sorting preferences and re-render. - * @return {[type]} [description] - */ - // sort_clicked : function(){ - // if (Galaxy.libraries.preferences.get('sort_order') === 'asc'){ - // Galaxy.libraries.preferences.set({'sort_order': 'desc'}); - // } else { - // Galaxy.libraries.preferences.set({'sort_order': 'asc'}); - // } - // this.render(); - // }, - - /** - * Sort the underlying collection according to the parameters received. - * Currently supports only sorting by name. - */ - // sortLibraries: function(){ - // if (Galaxy.libraries.preferences.get('sort_by') === 'name'){ - // if (Galaxy.libraries.preferences.get('sort_order') === 'asc'){ - // this.collection.sortByNameAsc(); - // } else if (Galaxy.libraries.preferences.get('sort_order') === 'desc'){ - // this.collection.sortByNameDesc(); - // } - // } - // }, - - // MMMMMMMMMMMMMMMMMM - // === TEMPLATES ==== - // MMMMMMMMMMMMMMMMMM - - templateGroupsList: function () { - const tmpl_array = []; - - tmpl_array.push('
'); - tmpl_array.push("
"); - tmpl_array.push('
'); - tmpl_array.push("<% if(length === 0) { %>"); - tmpl_array.push("
There are no groups yet.
"); - tmpl_array.push("<% } else{ %>"); - tmpl_array.push(''); - tmpl_array.push(" "); - tmpl_array.push(" "); - // tmpl_array.push(' '); - tmpl_array.push(" "); - tmpl_array.push(" "); - tmpl_array.push(" "); - tmpl_array.push(' '); - // group item views will attach here - tmpl_array.push(" "); - tmpl_array.push("
<th>Name</th><th>description</th><th>Members</th><th>Repositories</th>
"); - tmpl_array.push("<% }%>"); - tmpl_array.push("
"); - - return _.template(tmpl_array.join("")); - }, -}); - -export default { - GroupListView: GroupListView, -}; diff --git a/client/src/toolshed/groups/group-listrow-view.js b/client/src/toolshed/groups/group-listrow-view.js deleted file mode 100644 index 8acfa31e7634..000000000000 --- a/client/src/toolshed/groups/group-listrow-view.js +++ /dev/null @@ -1,35 +0,0 @@ -import Backbone from "backbone"; -import _ from "underscore"; - -// toolshed group row view -const GroupListRowView = Backbone.View.extend({ - events: {}, - - initialize: function (options) { - this.render(options.group); - }, - - render: function (group) { - const tmpl = this.templateRow(); - this.setElement(tmpl({ group: group })); - this.$el.show(); - return this; - }, - - templateRow: function () { - return _.template( - [ - '">', - '"><%= _.escape(group.get("name")) %>', - // 'description', - '<%= group.get("total_members") %>', - '<%= group.get("total_repos") %>', - "", - ].join("") - ); - }, -}); - -export default { - GroupListRowView: GroupListRowView, -}; diff --git a/client/src/toolshed/groups/group-model.js b/client/src/toolshed/groups/group-model.js deleted file mode 100644 index e001830830d6..000000000000 --- a/client/src/toolshed/groups/group-model.js +++ /dev/null @@ -1,20 +0,0 @@ -import Backbone from "backbone"; -import { getAppRoot } from "onload/loadConfig"; - -// ============================================================================ -// TS GROUP RELATED MODELS - -const Group = Backbone.Model.extend({ - urlRoot: `${getAppRoot()}api/groups`, -}); - -const Groups = Backbone.Collection.extend({ - url: `${getAppRoot()}api/groups`, - - model: Group, -}); - -export default { - Group: Group, - Groups: Groups, -}; diff --git a/client/src/toolshed/toolshed.groups.js b/client/src/toolshed/toolshed.groups.js deleted file mode 100644 index 89fb79623d45..000000000000 --- a/client/src/toolshed/toolshed.groups.js +++ /dev/null @@ -1,43 +0,0 @@ -// MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM -// === MAIN TOOLSHED GROUP MODULE ==== -// MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM - -import Backbone from "backbone"; -import mod_group_detail from "toolshed/groups/group-detail-view"; -import mod_group_list from "toolshed/groups/group-list-view"; - -// ============================================================================ -// ROUTER -const ToolshedRouter = Backbone.Router.extend({ - routes: { - "": "groups", - ":group_id": "group_page", - }, -}); - -const ToolshedGroups = Backbone.View.extend({ - groupListView: null, - groupDetailView: null, - collection: null, - - initialize: function () { - window.globalTS.groups = this; - - this.ts_router = new ToolshedRouter(); - this.ts_router.on("route:groups", () => { - window.globalTS.groups.groupListView = new mod_group_list.GroupListView(); - }); - - this.ts_router.on("route:group_page", (group_id) => { - window.globalTS.groups.groupDetailView = new mod_group_detail.GroupDetailView({ - group_id: group_id, - }); - }); - - Backbone.history.start({ pushState: false }); - }, -}); - -export default { - ToolshedGroups: ToolshedGroups, -}; From 5ebd1e70a7657b2c9436a6684dd7ba3b9ff2fe26 Mon Sep 17 00:00:00 2001 From: Enis Afgan Date: Thu, 12 Oct 2023 17:16:43 -0400 Subject: [PATCH 037/334] Specify uid for the k8s image --- .k8s_ci.Dockerfile | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.k8s_ci.Dockerfile b/.k8s_ci.Dockerfile index ba9d4b589036..65067806671e 100644 --- a/.k8s_ci.Dockerfile +++ b/.k8s_ci.Dockerfile @@ -166,7 +166,7 @@ RUN set -xe; \ # Create Galaxy user, 
group, directory; chown RUN set -xe; \ - adduser --system --group $GALAXY_USER \ + adduser --system --group --uid 101 $GALAXY_USER \ && mkdir -p $SERVER_DIR \ && chown $GALAXY_USER:$GALAXY_USER $ROOT_DIR -R @@ -179,7 +179,8 @@ COPY --chown=$GALAXY_USER:$GALAXY_USER --from=client_build $SERVER_DIR/static ./ WORKDIR $SERVER_DIR # The data in version.json will be displayed in Galaxy's /api/version endpoint -RUN printf "{\n \"git_commit\": \"$(cat GITREVISION)\",\n \"build_date\": \"$BUILD_DATE\",\n \"image_tag\": \"$IMAGE_TAG\"\n}\n" > version.json +RUN printf "{\n \"git_commit\": \"$(cat GITREVISION)\",\n \"build_date\": \"$BUILD_DATE\",\n \"image_tag\": \"$IMAGE_TAG\"\n}\n" > version.json \ + && chown $GALAXY_USER:$GALAXY_USER version.json EXPOSE 8080 USER $GALAXY_USER From 298637da37dc6f92ab4305d9185e9f83d3551194 Mon Sep 17 00:00:00 2001 From: guerler Date: Fri, 13 Oct 2023 12:26:29 +0300 Subject: [PATCH 038/334] Fix selenium selector for signout modal --- client/src/utils/navigation/navigation.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/client/src/utils/navigation/navigation.yml b/client/src/utils/navigation/navigation.yml index 84a1d2e7f8ad..7fd93b6c289b 100644 --- a/client/src/utils/navigation/navigation.yml +++ b/client/src/utils/navigation/navigation.yml @@ -122,8 +122,8 @@ change_user_address: sign_out: selectors: - cancel_button: '.modal-footer .buttons #button-0' - sign_out_button: '.modal-footer .buttons #button-1' + cancel_button: '.modal-footer .btn-secondary' + sign_out_button: '.modal-footer .btn-primary' dataset_details: selectors: From c62b2d9d20c6d3cad5b417da1ce9acc23da4f5a8 Mon Sep 17 00:00:00 2001 From: mvdbeek Date: Sun, 15 Oct 2023 13:00:42 +0200 Subject: [PATCH 039/334] Raise exception if persisting HDA will null file_size --- lib/galaxy/model/__init__.py | 1 + lib/galaxy/model/base.py | 8 ++++++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 6652ff72814d..cb3601e279fd 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -3870,6 +3870,7 @@ class Dataset(Base, StorableObject, Serializable): non_ready_states = (states.NEW, states.UPLOAD, states.QUEUED, states.RUNNING, states.SETTING_METADATA) ready_states = tuple(set(states.__members__.values()) - set(non_ready_states)) valid_input_states = tuple(set(states.__members__.values()) - {states.ERROR, states.DISCARDED}) + no_data_states = (states.PAUSED, states.DEFERRED, states.DISCARDED, *non_ready_states) terminal_states = ( states.OK, states.EMPTY, diff --git a/lib/galaxy/model/base.py b/lib/galaxy/model/base.py index d5326c785b20..bcd76da2c6a8 100644 --- a/lib/galaxy/model/base.py +++ b/lib/galaxy/model/base.py @@ -152,9 +152,13 @@ def versioned_objects_strict(iter): # These should get some other type of permanent storage, perhaps UserDatasetAssociation ? 
# Everything else needs to have a hid and a history if not obj.history and not obj.history_id: - raise Exception(f"HistoryDatsetAssociation {obj} without history detected, this is not valid") + raise Exception(f"HistoryDatasetAssociation {obj} without history detected, this is not valid") elif not obj.hid: - raise Exception(f"HistoryDatsetAssociation {obj} without has no hid, this is not valid") + raise Exception(f"HistoryDatasetAssociation {obj} without hid, this is not valid") + elif obj.dataset.file_size is None and obj.dataset.state not in obj.dataset.no_data_states: + raise Exception( + f"HistoryDatasetAssociation {obj} in state {obj.dataset.state} with null file size, this is not valid" + ) yield obj From 7a06271971eeee7a5ee932fc1068103a6f258130 Mon Sep 17 00:00:00 2001 From: mvdbeek Date: Sun, 15 Oct 2023 14:37:38 +0200 Subject: [PATCH 040/334] Fix and prevent persisting null file_size --- lib/galaxy/metadata/set_metadata.py | 1 + lib/galaxy/model/store/discover.py | 2 ++ 2 files changed, 3 insertions(+) diff --git a/lib/galaxy/metadata/set_metadata.py b/lib/galaxy/metadata/set_metadata.py index ff9380ece0b4..b512f3f905b5 100644 --- a/lib/galaxy/metadata/set_metadata.py +++ b/lib/galaxy/metadata/set_metadata.py @@ -449,6 +449,7 @@ def set_meta(new_dataset_instance, file_dict): partial(push_if_necessary, object_store, dataset, external_filename) ) object_store_update_actions.append(partial(reset_external_filename, dataset)) + object_store_update_actions.append(partial(dataset.set_total_size)) object_store_update_actions.append(partial(export_store.add_dataset, dataset)) if dataset_instance_id not in unnamed_id_to_path: object_store_update_actions.append(partial(collect_extra_files, object_store, dataset, ".")) diff --git a/lib/galaxy/model/store/discover.py b/lib/galaxy/model/store/discover.py index 832357acb45b..a213327f9618 100644 --- a/lib/galaxy/model/store/discover.py +++ b/lib/galaxy/model/store/discover.py @@ -265,6 +265,8 @@ def set_datasets_metadata(datasets, datasets_attributes=None): except Exception: log.exception("Exception occured while setting dataset peek") + primary_data.set_total_size() + def populate_collection_elements( self, collection, From 1003351485f3f531b0849bccc9b05f883ace148d Mon Sep 17 00:00:00 2001 From: mvdbeek Date: Sun, 15 Oct 2023 16:50:27 +0200 Subject: [PATCH 041/334] Skip unnecessary flush when applying permissions --- lib/galaxy/job_execution/output_collect.py | 2 +- lib/galaxy/model/security.py | 4 ++-- lib/galaxy/model/store/discover.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/galaxy/job_execution/output_collect.py b/lib/galaxy/job_execution/output_collect.py index b9e254150271..3b15f7feace6 100644 --- a/lib/galaxy/job_execution/output_collect.py +++ b/lib/galaxy/job_execution/output_collect.py @@ -91,7 +91,7 @@ def set_default_hda_permissions(self, primary_data): self._security_agent.set_all_dataset_permissions(primary_data.dataset, permissions, new=True, flush=False) def copy_dataset_permissions(self, init_from, primary_data): - self._security_agent.copy_dataset_permissions(init_from.dataset, primary_data.dataset) + self._security_agent.copy_dataset_permissions(init_from.dataset, primary_data.dataset, flush=False) class MetadataSourceProvider(AbstractMetadataSourceProvider): diff --git a/lib/galaxy/model/security.py b/lib/galaxy/model/security.py index 84c3eb5fb976..bb07ea071efe 100644 --- a/lib/galaxy/model/security.py +++ b/lib/galaxy/model/security.py @@ -998,12 +998,12 @@ def get_permissions(self, item): 
permissions[action] = [item_permission.role] return permissions - def copy_dataset_permissions(self, src, dst): + def copy_dataset_permissions(self, src, dst, flush=True): if not isinstance(src, self.model.Dataset): src = src.dataset if not isinstance(dst, self.model.Dataset): dst = dst.dataset - self.set_all_dataset_permissions(dst, self.get_permissions(src)) + self.set_all_dataset_permissions(dst, self.get_permissions(src), flush=flush) def privately_share_dataset(self, dataset, users=None): dataset.ensure_shareable() diff --git a/lib/galaxy/model/store/discover.py b/lib/galaxy/model/store/discover.py index a213327f9618..634ce6048eec 100644 --- a/lib/galaxy/model/store/discover.py +++ b/lib/galaxy/model/store/discover.py @@ -125,7 +125,7 @@ def create_dataset( if init_from: self.permission_provider.copy_dataset_permissions(init_from, primary_data) - primary_data.state = init_from.state + primary_data.raw_set_dataset_state(init_from.state) else: self.permission_provider.set_default_hda_permissions(primary_data) else: From 6d68b78f151f3bc99f802d00572c82168a31616f Mon Sep 17 00:00:00 2001 From: mvdbeek Date: Sun, 15 Oct 2023 22:06:47 +0200 Subject: [PATCH 042/334] Set dataset size also when failing jobs --- lib/galaxy/jobs/__init__.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/lib/galaxy/jobs/__init__.py b/lib/galaxy/jobs/__init__.py index f70d7250bad4..2b2a19ad34db 100644 --- a/lib/galaxy/jobs/__init__.py +++ b/lib/galaxy/jobs/__init__.py @@ -1430,8 +1430,6 @@ def fail( dataset.state = dataset.states.ERROR dataset.blurb = "tool error" dataset.info = message - dataset.set_size() - dataset.dataset.set_total_size() dataset.mark_unhidden() if dataset.ext == "auto": dataset.extension = "data" @@ -1738,7 +1736,6 @@ def _finish_dataset(self, output_name, dataset, job, context, final_job_state, r # Ensure white space between entries dataset.info = f"{dataset.info.rstrip()}\n{context['stderr'].strip()}" dataset.tool_version = self.version_string - dataset.set_size() if "uuid" in context: dataset.dataset.uuid = context["uuid"] self.__update_output(job, dataset) @@ -2423,6 +2420,7 @@ def __update_output(self, job, hda, clean_only=False): cleaned up if the dataset has been purged. 
""" dataset = hda.dataset + dataset.set_total_size() if dataset not in job.output_library_datasets: purged = dataset.purged if not purged and not clean_only: From 23d938f7f90546006e789ede77e2cb6dd80dfd18 Mon Sep 17 00:00:00 2001 From: mvdbeek Date: Sun, 15 Oct 2023 22:10:44 +0200 Subject: [PATCH 043/334] Fix dataset collection unit tests --- test/unit/app/tools/test_collect_primary_datasets.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/test/unit/app/tools/test_collect_primary_datasets.py b/test/unit/app/tools/test_collect_primary_datasets.py index 08bceb0be90b..40618ca9207b 100644 --- a/test/unit/app/tools/test_collect_primary_datasets.py +++ b/test/unit/app/tools/test_collect_primary_datasets.py @@ -447,6 +447,9 @@ class MockObjectStore: def __init__(self): self.created_datasets = {} + def get_store_by(self, obj, **kwargs): + return "uuid" + def update_from_file(self, dataset, file_name, create): if create: self.created_datasets[dataset] = file_name @@ -458,7 +461,7 @@ def size(self, dataset): def exists(self, *args, **kwargs): return True - def get_filename(self, dataset): + def get_filename(self, dataset, **kwargs): return self.created_datasets[dataset] From 7e41cce56d1ec59f7ad53333b4ea12248492c31c Mon Sep 17 00:00:00 2001 From: mvdbeek Date: Sun, 15 Oct 2023 23:25:24 +0200 Subject: [PATCH 044/334] Set size for null datasets --- lib/galaxy/tools/actions/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/galaxy/tools/actions/__init__.py b/lib/galaxy/tools/actions/__init__.py index fd658c163a07..249b34f2b484 100644 --- a/lib/galaxy/tools/actions/__init__.py +++ b/lib/galaxy/tools/actions/__init__.py @@ -675,6 +675,7 @@ def handle_output(name, output, hidden=None): data.visible = False with open(data.dataset.file_name, "w") as out: out.write(json.dumps(None)) + data.set_total_size() job.preferred_object_store_id = preferred_object_store_id self._record_inputs(trans, tool, job, incoming, inp_data, inp_dataset_collections) self._record_outputs(job, out_data, output_collections) From fab8b6c85f4b1364ed6c20a97f79027f95d3e1a1 Mon Sep 17 00:00:00 2001 From: Ahmed Awan Date: Mon, 16 Oct 2023 15:32:00 -0500 Subject: [PATCH 045/334] allow partial matches in workflow name tag search do not require quotations for name tags --- client/src/components/Page/PageList.vue | 4 ++-- .../components/Workflow/WorkflowFilters.js | 4 ++-- client/src/utils/filtering.ts | 24 ++++--------------- lib/galaxy/util/search.py | 7 ++++-- 4 files changed, 14 insertions(+), 25 deletions(-) diff --git a/client/src/components/Page/PageList.vue b/client/src/components/Page/PageList.vue index e7e0f3ac311c..d1e430b26b0e 100644 --- a/client/src/components/Page/PageList.vue +++ b/client/src/components/Page/PageList.vue @@ -100,7 +100,7 @@ import StatelessTags from "components/TagsMultiselect/StatelessTags"; import UtcDate from "components/UtcDate"; import paginationMixin from "components/Workflow/paginationMixin"; import { getAppRoot } from "onload/loadConfig"; -import Filtering, { contains, equals, expandNameTagWithQuotes, toBool } from "utils/filtering"; +import Filtering, { contains, equals, expandNameTag, toBool } from "utils/filtering"; import _l from "utils/localization"; import { useRouter } from "vue-router/composables"; @@ -155,7 +155,7 @@ const validFilters = { tag: { placeholder: "tag(s)", type: "MultiTags", - handler: contains("tag", "tag", expandNameTagWithQuotes), + handler: contains("tag", "tag", expandNameTag), menuItem: true, }, published: { diff --git 
a/client/src/components/Workflow/WorkflowFilters.js b/client/src/components/Workflow/WorkflowFilters.js index cc25891fafef..d91c8ca14ffd 100644 --- a/client/src/components/Workflow/WorkflowFilters.js +++ b/client/src/components/Workflow/WorkflowFilters.js @@ -1,4 +1,4 @@ -import Filtering, { contains, equals, expandNameTagWithQuotes, toBool } from "utils/filtering"; +import Filtering, { contains, equals, expandNameTag, toBool } from "utils/filtering"; export const helpHtml = `

This input can be used to filter the workflows displayed.

@@ -56,7 +56,7 @@ const validFilters = { tag: { placeholder: "tag(s)", type: "MultiTags", - handler: contains("tag", "tag", expandNameTagWithQuotes), + handler: contains("tag", "tag", expandNameTag), menuItem: true, }, published: { diff --git a/client/src/utils/filtering.ts b/client/src/utils/filtering.ts index 905b41a4fd41..c3cd711f4ada 100644 --- a/client/src/utils/filtering.ts +++ b/client/src/utils/filtering.ts @@ -108,25 +108,13 @@ export function toLowerNoQuotes(value: T): string { * */ export function expandNameTag(value: T): string { if (value && typeof value === "string") { - value = value.replace(/^#/, "name:") as T; - } - return toLower(value); -} - -/** Converts name tags starting with "#" to "'name:...'"; forces quotation marks - * and **is also case-sensitive** - * @param value - * @returns Lowercase value with 'name:' replaced with '#' - */ -export function expandNameTagWithQuotes(value: T): string { - if (value && typeof value === "string") { - if (value.startsWith("'#") && value.endsWith("'")) { - value = value.replace(/^'#/g, "'name:") as T; - } else if (value.startsWith("#")) { - value = `'name:${value.slice(1)}'` as T; + if ((value.startsWith("'#") || value.startsWith('"#')) && (value.endsWith('"') || value.endsWith("'"))) { + value = value.replace(/^['"]#/g, "'name:") as T; + } else { + value = value.replace(/^#/, "name:") as T; } } - return value as string; + return toLower(value); } /** Converts string alias to string operator, e.g.: 'gt' to '>' @@ -569,8 +557,6 @@ export default class Filtering { return filterValue; } else if (!backendFormatted && ([expandNameTag, toDate] as Converter[]).includes(converter)) { return toLower(filterValue) as T; - } else if (!backendFormatted && converter == expandNameTagWithQuotes) { - return (filterValue as string).startsWith("#") ? (`'${filterValue}'` as T) : filterValue; } return converter(filterValue); } else { diff --git a/lib/galaxy/util/search.py b/lib/galaxy/util/search.py index b0db7505619e..11f072489e81 100644 --- a/lib/galaxy/util/search.py +++ b/lib/galaxy/util/search.py @@ -36,7 +36,7 @@ def parse_filters_structured( search_space = search_term.replace('"', "'") filters = filters or {} filter_keys = "|".join(list(filters.keys())) - pattern = rf"({filter_keys}):(?:\s+)?([\w-]+|\'.*?\')" + pattern = rf"({filter_keys}):(?:\s+)?([\w-]+|'.*?')(:\w+)?" 
reserved = re.compile(pattern) parsed_search = ParsedSearch() while True: @@ -52,8 +52,11 @@ def parse_filters_structured( else: first_group = match.groups()[0] if first_group in filters: + if match.groups()[0] == "tag" and match.groups()[1] == "name" and match.groups()[2] is not None: + group = match.groups()[1] + match.groups()[2].strip() + else: + group = match.groups()[1].strip() filter_as = filters[first_group] - group = match.groups()[1].strip() quoted = preserve_quotes and group.startswith("'") parsed_search.add_keyed_term(filter_as, group.replace("'", ""), quoted) parsed_search.add_unfiltered_text_terms(search_space[0 : match.start()]) From d234b288128b9745a480ff5e2a47940a42c3c590 Mon Sep 17 00:00:00 2001 From: guerler Date: Wed, 11 Oct 2023 21:53:23 +0300 Subject: [PATCH 046/334] Remove legacy parameter matching condition from form elements --- client/src/components/Form/FormElement.vue | 8 -------- 1 file changed, 8 deletions(-) diff --git a/client/src/components/Form/FormElement.vue b/client/src/components/Form/FormElement.vue index 7a5841e7338b..208664bdb616 100644 --- a/client/src/components/Form/FormElement.vue +++ b/client/src/components/Form/FormElement.vue @@ -18,7 +18,6 @@ import FormHidden from "./Elements/FormHidden.vue"; import FormInput from "./Elements/FormInput.vue"; import FormNumber from "./Elements/FormNumber.vue"; import FormOptionalText from "./Elements/FormOptionalText.vue"; -import FormParameter from "./Elements/FormParameter.vue"; import FormRulesEdit from "./Elements/FormRulesEdit.vue"; import FormSelection from "./Elements/FormSelection.vue"; import FormTags from "./Elements/FormTags.vue"; @@ -300,13 +299,6 @@ const isOptional = computed(() => !isRequired.value && attrs.value["optional"] ! - Date: Wed, 11 Oct 2023 22:11:44 +0300 Subject: [PATCH 047/334] Remove legacy parameters factory --- .../components/Form/Elements/parameters.js | 52 ------------------- 1 file changed, 52 deletions(-) delete mode 100644 client/src/components/Form/Elements/parameters.js diff --git a/client/src/components/Form/Elements/parameters.js b/client/src/components/Form/Elements/parameters.js deleted file mode 100644 index 01729e887c0a..000000000000 --- a/client/src/components/Form/Elements/parameters.js +++ /dev/null @@ -1,52 +0,0 @@ -/** - This class creates input elements. New input parameter types should be added to the types dictionary. -*/ -import Backbone from "backbone"; -import SelectFtp from "mvc/ui/ui-select-ftp"; -import SelectLibrary from "mvc/ui/ui-select-library"; - -// create form view -export default Backbone.View.extend({ - /** Available parameter types */ - types: { - library_data: "_fieldLibrary", - ftpfile: "_fieldFtp", - }, - - remove: function () { - this.field.remove(); - Backbone.View.prototype.remove.call(this); - }, - - /** Returns an input field for a given field type */ - create: function (input_def) { - var fieldClass = this.types[input_def.type]; - this.field = typeof this[fieldClass] === "function" ? this[fieldClass].call(this, input_def) : null; - if (input_def.value === undefined) { - input_def.value = null; - } - this.field.value(input_def.value); - this.setElement(input_def.el || "
"); - this.$el.append(this.field.$el); - }, - - /** Library dataset field */ - _fieldLibrary: function (input_def) { - return new SelectLibrary.View({ - id: input_def.id, - optional: input_def.optional, - multiple: input_def.multiple, - onchange: input_def.onchange, - }); - }, - - /** FTP file field */ - _fieldFtp: function (input_def) { - return new SelectFtp.View({ - id: input_def.id, - optional: input_def.optional, - multiple: input_def.multiple, - onchange: input_def.onchange, - }); - }, -}); From bf18eb096ad406fc57c18ec01d25cf7ae4adf336 Mon Sep 17 00:00:00 2001 From: guerler Date: Wed, 11 Oct 2023 22:12:50 +0300 Subject: [PATCH 048/334] Remove legacy ftpfile and library_data wrappers --- client/src/mvc/ui/ui-list.js | 148 ------------------------- client/src/mvc/ui/ui-select-ftp.js | 52 --------- client/src/mvc/ui/ui-select-library.js | 126 --------------------- 3 files changed, 326 deletions(-) delete mode 100644 client/src/mvc/ui/ui-list.js delete mode 100644 client/src/mvc/ui/ui-select-ftp.js delete mode 100644 client/src/mvc/ui/ui-select-library.js diff --git a/client/src/mvc/ui/ui-list.js b/client/src/mvc/ui/ui-list.js deleted file mode 100644 index e3a84db86775..000000000000 --- a/client/src/mvc/ui/ui-list.js +++ /dev/null @@ -1,148 +0,0 @@ -import Backbone from "backbone"; -import $ from "jquery"; -import Ui from "mvc/ui/ui-misc"; -import Utils from "utils/utils"; - -var View = Backbone.View.extend({ - initialize: function (options) { - this.options = options; - this.name = options.name || "element"; - this.multiple = options.multiple || false; - - // create message handler - this.message = new Ui.Message({ cls: "col mb-0" }); - - // create selections area - this.selections = $("
"); - - // create select field containing the options which can be inserted into the list - this.select = new Ui.Select.View({ optional: options.optional }); - - // create insert new list element button - this.$button = $(`