@@ -1117,7 +1119,7 @@ exports[`TokenField should render "updateOnInputChange" correctly 1`] = `
className="mr1"
>
diff --git a/frontend/test/lib/utils.unit.spec.js b/frontend/test/lib/utils.unit.spec.js
index d8db00751bcb4..987a2eb7ebef5 100644
--- a/frontend/test/lib/utils.unit.spec.js
+++ b/frontend/test/lib/utils.unit.spec.js
@@ -1,39 +1,46 @@
import MetabaseUtils from "metabase/lib/utils";
+import MetabaseSettings from "metabase/lib/settings";
describe("utils", () => {
describe("generatePassword", () => {
- it("defaults to length 14 passwords", () => {
- expect(MetabaseUtils.generatePassword().length).toBe(14);
+ it("defaults to complexity requirements from Settings", () => {
+ MetabaseSettings.set("password_complexity", { total: 10 });
+ expect(MetabaseUtils.generatePassword().length).toBe(10);
+ });
+
+ it("falls back to length 14 passwords", () => {
+ expect(MetabaseUtils.generatePassword({}).length).toBe(14);
});
it("creates passwords for the length we specify", () => {
- expect(MetabaseUtils.generatePassword(25).length).toBe(25);
+ expect(MetabaseUtils.generatePassword({ total: 25 }).length).toBe(25);
});
it("can enforce ", () => {
expect(
- MetabaseUtils.generatePassword(14, { digit: 2 }).match(/([\d])/g)
+ MetabaseUtils.generatePassword({ total: 14, digit: 2 }).match(/([\d])/g)
.length >= 2,
).toBe(true);
});
it("can enforce digit requirements", () => {
expect(
- MetabaseUtils.generatePassword(14, { digit: 2 }).match(/([\d])/g)
+ MetabaseUtils.generatePassword({ total: 14, digit: 2 }).match(/([\d])/g)
.length >= 2,
).toBe(true);
});
it("can enforce uppercase requirements", () => {
expect(
- MetabaseUtils.generatePassword(14, { uppercase: 2 }).match(/([A-Z])/g)
- .length >= 2,
+ MetabaseUtils.generatePassword({ total: 14, uppercase: 2 }).match(
+ /([A-Z])/g,
+ ).length >= 2,
).toBe(true);
});
it("can enforce special character requirements", () => {
expect(
- MetabaseUtils.generatePassword(14, { special: 2 }).match(
+ MetabaseUtils.generatePassword({ total: 14, special: 2 }).match(
/([!@#\$%\^\&*\)\(+=._-{}])/g,
).length >= 2,
).toBe(true);
diff --git a/frontend/test/nav/ProfileLink.unit.spec.js b/frontend/test/nav/ProfileLink.unit.spec.js
new file mode 100644
index 0000000000000..35c371207da93
--- /dev/null
+++ b/frontend/test/nav/ProfileLink.unit.spec.js
@@ -0,0 +1,31 @@
+import React from "react";
+import { shallow } from "enzyme";
+import ProfileLink from "metabase/nav/components/ProfileLink";
+
+jest.mock("metabase/lib/settings", () => ({
+ get: () => ({
+ tag: 1,
+ version: 1,
+ }),
+}));
+
+describe("ProfileLink", () => {
+ describe("options", () => {
+ describe("normal user", () => {
+ it("should show the proper set of items", () => {
+ const normalUser = { is_superuser: false };
+      const wrapper = shallow(<ProfileLink user={normalUser} />);
+
+ expect(wrapper.instance().generateOptionsForUser().length).toBe(3);
+ });
+ });
+ describe("admin", () => {
+ it("should show the proper set of items", () => {
+ const admin = { is_superuser: true };
+      const wrapper = shallow(<ProfileLink user={admin} />);
+
+ expect(wrapper.instance().generateOptionsForUser().length).toBe(5);
+ });
+ });
+ });
+});
diff --git a/frontend/test/parameters/parameters.integ.spec.js b/frontend/test/parameters/parameters.integ.spec.js
index e2345ee3162db..1adcb1876f8b0 100644
--- a/frontend/test/parameters/parameters.integ.spec.js
+++ b/frontend/test/parameters/parameters.integ.spec.js
@@ -314,7 +314,7 @@ describe("parameters", () => {
app = mount(store.getAppContainer());
await store.waitForActions([FETCH_DASHBOARD]);
- expect(app.find(".DashboardHeader .Entity h2").text()).toEqual(
+ expect(app.find(".DashboardHeader .Entity .h2").text()).toEqual(
"Test Dashboard",
);
diff --git a/frontend/test/public/public.integ.spec.js b/frontend/test/public/public.integ.spec.js
index 80df3537a3577..d119ff6796fb4 100644
--- a/frontend/test/public/public.integ.spec.js
+++ b/frontend/test/public/public.integ.spec.js
@@ -67,7 +67,6 @@ import * as Urls from "metabase/lib/urls";
import QuestionEmbedWidget from "metabase/query_builder/containers/QuestionEmbedWidget";
import EmbedWidget from "metabase/public/components/widgets/EmbedWidget";
-import Collections from "metabase/entities/collections";
import { CardApi, DashboardApi, SettingsApi } from "metabase/services";
const PEOPLE_TABLE_ID = 2;
@@ -223,7 +222,6 @@ describe("public/embedded", () => {
.first()
.find("a"),
);
- await store.waitForActions([Collections.actions.fetchList]);
setInputValue(
app.find(SaveQuestionModal).find("input[name='name']"),
diff --git a/frontend/test/pulse/pulse.integ.spec.js b/frontend/test/pulse/pulse.integ.spec.js
index 9258d367a856a..a4097400e71a1 100644
--- a/frontend/test/pulse/pulse.integ.spec.js
+++ b/frontend/test/pulse/pulse.integ.spec.js
@@ -14,7 +14,7 @@ import Question from "metabase-lib/lib/Question";
import PulseListApp from "metabase/pulse/containers/PulseListApp";
import PulseEditApp from "metabase/pulse/containers/PulseEditApp";
import PulseListItem from "metabase/pulse/components/PulseListItem";
-import CardPicker from "metabase/pulse/components/CardPicker";
+import QuestionSelect from "metabase/containers/QuestionSelect";
import PulseCardPreview from "metabase/pulse/components/PulseCardPreview";
import Toggle from "metabase/components/Toggle";
@@ -113,7 +113,7 @@ describe("Pulse", () => {
// add count card
app
- .find(CardPicker)
+ .find(QuestionSelect)
.first()
.props()
.onChange(questionCount.id());
@@ -121,7 +121,7 @@ describe("Pulse", () => {
// add raw card
app
- .find(CardPicker)
+ .find(QuestionSelect)
.first()
.props()
.onChange(questionRaw.id());
diff --git a/frontend/test/query_builder/components/VisualizationSettings.integ.spec.js b/frontend/test/query_builder/components/VisualizationSettings.integ.spec.js
index 18076d55c217d..b75b248bee1ac 100644
--- a/frontend/test/query_builder/components/VisualizationSettings.integ.spec.js
+++ b/frontend/test/query_builder/components/VisualizationSettings.integ.spec.js
@@ -16,7 +16,6 @@ import {
import { FETCH_TABLE_METADATA } from "metabase/redux/metadata";
-import CheckBox from "metabase/components/CheckBox";
import RunButton from "metabase/query_builder/components/RunButton";
import VisualizationSettings from "metabase/query_builder/components/VisualizationSettings";
@@ -65,17 +64,14 @@ describe("QueryBuilder", () => {
const doneButton = settingsModal.find(".Button--primary");
expect(doneButton.length).toBe(1);
- const fieldsToIncludeCheckboxes = settingsModal.find(CheckBox);
- expect(fieldsToIncludeCheckboxes.length).toBe(7);
+ const fieldsToIncludeRemoveButtons = settingsModal.find(".Icon-close");
+ expect(fieldsToIncludeRemoveButtons.length).toBe(6);
click(
- fieldsToIncludeCheckboxes.filterWhere(
- checkbox =>
- checkbox
- .parent()
- .find("span")
- .text() === "Created At",
- ),
+ settingsModal
+ .find("ColumnItem")
+ .findWhere(x => x.text() === "Created At")
+ .find(".Icon-close"),
);
expect(table.find('div[children="Created At"]').length).toBe(0);
diff --git a/frontend/test/query_builder/qb_visualizations.integ.spec.js b/frontend/test/query_builder/qb_visualizations.integ.spec.js
index e6cfd422f1aaa..d73414368ef14 100644
--- a/frontend/test/query_builder/qb_visualizations.integ.spec.js
+++ b/frontend/test/query_builder/qb_visualizations.integ.spec.js
@@ -26,8 +26,6 @@ import * as Urls from "metabase/lib/urls";
import VisualizationSettings from "metabase/query_builder/components/VisualizationSettings";
import Popover from "metabase/components/Popover";
-import Collections from "metabase/entities/collections";
-
const timeBreakoutQuestion = Question.create({
databaseId: 1,
tableId: 1,
@@ -78,8 +76,6 @@ describe("Query Builder visualization logic", () => {
.find("a"),
);
- await store.waitForActions([Collections.actions.fetchList]);
-
setInputValue(
app.find(SaveQuestionModal).find("input[name='name']"),
"test visualization question",
diff --git a/frontend/test/reference/metrics.integ.spec.js b/frontend/test/reference/metrics.integ.spec.js
index 642a1a52a068d..c076b35fcbb37 100644
--- a/frontend/test/reference/metrics.integ.spec.js
+++ b/frontend/test/reference/metrics.integ.spec.js
@@ -5,6 +5,7 @@ import {
import React from "react";
import { mount } from "enzyme";
+import { assocIn } from "icepick";
import { CardApi, MetricApi } from "metabase/services";
@@ -21,6 +22,8 @@ import MetricDetailContainer from "metabase/reference/metrics/MetricDetailContai
import MetricQuestionsContainer from "metabase/reference/metrics/MetricQuestionsContainer";
import MetricRevisionsContainer from "metabase/reference/metrics/MetricRevisionsContainer";
+// NOTE: database/table_id/source_table are hard-coded, this might be a problem at some point
+
describe("The Reference Section", () => {
// Test data
const metricDef = {
@@ -28,7 +31,7 @@ describe("The Reference Section", () => {
description: "I did it!",
table_id: 1,
show_in_getting_started: true,
- definition: { database: 1, query: { aggregation: ["count"] } },
+ definition: { database: 1, query: { aggregation: [["count"]] } },
};
const anotherMetricDef = {
@@ -36,7 +39,7 @@ describe("The Reference Section", () => {
description: "I did it again!",
table_id: 1,
show_in_getting_started: true,
- definition: { database: 1, query: { aggregation: ["count"] } },
+ definition: { database: 1, query: { aggregation: [["count"]] } },
};
const metricCardDef = {
@@ -46,7 +49,7 @@ describe("The Reference Section", () => {
database: 1,
table_id: 1,
type: "query",
- query: { source_table: 1, aggregation: ["metric", 1] },
+ query: { source_table: 1, aggregation: [["metric", 1]] },
},
visualization_settings: {},
};
@@ -116,10 +119,16 @@ describe("The Reference Section", () => {
});
it("Should see a newly asked question in its questions list", async () => {
- let card = await CardApi.create(metricCardDef);
- expect(card.name).toBe(metricCardDef.name);
-
+ let card;
try {
+ const cardDef = assocIn(
+ metricCardDef,
+ ["dataset_query", "query", "aggregation", 0, 1],
+ metricIds[0],
+ );
+ card = await CardApi.create(cardDef);
+ expect(card.name).toBe(metricCardDef.name);
+
// see that there is a new question on the metric's questions page
const store = await createTestStore();
@@ -127,8 +136,10 @@ describe("The Reference Section", () => {
      mount(store.connectContainer(<MetricQuestionsContainer metricId={metricIds[0]} />));
await store.waitForActions([FETCH_METRICS, FETCH_METRIC_TABLE]);
} finally {
- // even if the code above results in an exception, try to delete the question
- await CardApi.delete({ cardId: card.id });
+ if (card) {
+ // even if the code above results in an exception, try to delete the question
+ await CardApi.delete({ cardId: card.id });
+ }
}
});
});
diff --git a/frontend/test/reference/segments.integ.spec.js b/frontend/test/reference/segments.integ.spec.js
index c6dea3b2735da..273281bf30cd5 100644
--- a/frontend/test/reference/segments.integ.spec.js
+++ b/frontend/test/reference/segments.integ.spec.js
@@ -5,6 +5,7 @@ import {
import React from "react";
import { mount } from "enzyme";
+import { assocIn } from "icepick";
import { CardApi, SegmentApi } from "metabase/services";
@@ -24,6 +25,8 @@ import SegmentRevisionsContainer from "metabase/reference/segments/SegmentRevisi
import SegmentFieldListContainer from "metabase/reference/segments/SegmentFieldListContainer";
import SegmentFieldDetailContainer from "metabase/reference/segments/SegmentFieldDetailContainer";
+// NOTE: database/table_id/source_table are hard-coded, this might be a problem at some point
+
describe("The Reference Section", () => {
// Test data
const segmentDef = {
@@ -154,7 +157,12 @@ describe("The Reference Section", () => {
});
it("Should see a newly asked question in its questions list", async () => {
- let card = await CardApi.create(segmentCardDef);
+ const cardDef = assocIn(
+ segmentCardDef,
+ ["dataset_query", "query", "filter", 1],
+ segmentIds[0],
+ );
+ let card = await CardApi.create(cardDef);
expect(card.name).toBe(segmentCardDef.name);
diff --git a/frontend/test/visualizations/components/ChartSettings.unit.spec.js b/frontend/test/visualizations/components/ChartSettings.unit.spec.js
deleted file mode 100644
index 1729473d62e72..0000000000000
--- a/frontend/test/visualizations/components/ChartSettings.unit.spec.js
+++ /dev/null
@@ -1,81 +0,0 @@
-import React from "react";
-
-import ChartSettings from "metabase/visualizations/components/ChartSettings";
-
-import { TableCard } from "../__support__/visualizations";
-
-import { mount } from "enzyme";
-import { click } from "__support__/enzyme_utils";
-
-function renderChartSettings(enabled = true) {
- const props = {
- series: [
- TableCard("Foo", {
- card: {
- visualization_settings: {
- "table.columns": [{ name: "Foo_col0", enabled: enabled }],
- },
- },
- }),
- ],
- };
-  return mount(<ChartSettings {...props} onChange={() => {}} />);
-}
-
-// The ExplicitSize component uses the matchMedia DOM API
-// which does not exist in jest's JSDOM
-Object.defineProperty(window, "matchMedia", {
- value: jest.fn(() => {
- return {
- matches: true,
- addListener: () => {},
- removeListener: () => {},
- };
- }),
-});
-
-// We have to do some mocking here to avoid calls to GA and to Metabase settings
-jest.mock("metabase/lib/settings", () => ({
- get: () => "v",
-}));
-
-describe("ChartSettings", () => {
- describe("toggling fields", () => {
- describe("disabling all fields", () => {
- it("should show null state", () => {
- const chartSettings = renderChartSettings();
-
- expect(chartSettings.find(".toggle-all .Icon-check").length).toEqual(1);
- expect(chartSettings.find("table").length).toEqual(1);
-
- click(chartSettings.find(".toggle-all .cursor-pointer"));
-
- expect(chartSettings.find(".toggle-all .Icon-check").length).toEqual(0);
- expect(chartSettings.find("table").length).toEqual(0);
- expect(chartSettings.text()).toContain(
- "Every field is hidden right now",
- );
- });
- });
-
- describe("enabling all fields", () => {
- it("should show all columns", () => {
- const chartSettings = renderChartSettings(false);
-
- expect(chartSettings.find(".toggle-all .Icon-check").length).toEqual(0);
- expect(chartSettings.find("table").length).toEqual(0);
- expect(chartSettings.text()).toContain(
- "Every field is hidden right now",
- );
-
- click(chartSettings.find(".toggle-all .cursor-pointer"));
-
- expect(chartSettings.find(".toggle-all .Icon-check").length).toEqual(1);
- expect(chartSettings.find("table").length).toEqual(1);
- expect(chartSettings.text()).not.toContain(
- "Every field is hidden right now",
- );
- });
- });
- });
-});
diff --git a/frontend/test/visualizations/components/settings/ChartSettingOrderedColumns.unit.spec.js b/frontend/test/visualizations/components/settings/ChartSettingOrderedColumns.unit.spec.js
new file mode 100644
index 0000000000000..b2cb958d61e38
--- /dev/null
+++ b/frontend/test/visualizations/components/settings/ChartSettingOrderedColumns.unit.spec.js
@@ -0,0 +1,84 @@
+import React from "react";
+
+import ChartSettingOrderedColumns from "metabase/visualizations/components/settings/ChartSettingOrderedColumns";
+
+import { mount } from "enzyme";
+
+import { question } from "../../../__support__/sample_dataset_fixture.js";
+
+function renderChartSettingOrderedColumns(props) {
+  return mount(
+    <ChartSettingOrderedColumns
+      onChange={() => {}}
+      columns={[{ name: "Foo" }, { name: "Bar" }]}
+      {...props}
+    />,
+  );
+}
+
+describe("ChartSettingOrderedColumns", () => {
+ it("should have the correct add and remove buttons", () => {
+ const setting = renderChartSettingOrderedColumns({
+ value: [{ name: "Foo", enabled: true }, { name: "Bar", enabled: false }],
+ });
+ expect(setting.find(".Icon-add")).toHaveLength(1);
+ expect(setting.find(".Icon-close")).toHaveLength(1);
+ });
+ it("should add a column", () => {
+ const onChange = jest.fn();
+ const setting = renderChartSettingOrderedColumns({
+ value: [{ name: "Foo", enabled: true }, { name: "Bar", enabled: false }],
+ onChange,
+ });
+ setting.find(".Icon-add").simulate("click");
+ expect(onChange.mock.calls).toEqual([
+ [[{ name: "Foo", enabled: true }, { name: "Bar", enabled: true }]],
+ ]);
+ });
+ it("should remove a column", () => {
+ const onChange = jest.fn();
+ const setting = renderChartSettingOrderedColumns({
+ value: [{ name: "Foo", enabled: true }, { name: "Bar", enabled: false }],
+ onChange,
+ });
+ setting.find(".Icon-close").simulate("click");
+ expect(onChange.mock.calls).toEqual([
+ [[{ name: "Foo", enabled: false }, { name: "Bar", enabled: false }]],
+ ]);
+ });
+ it("should reorder columns", () => {
+ const onChange = jest.fn();
+ const setting = renderChartSettingOrderedColumns({
+ value: [{ name: "Foo", enabled: true }, { name: "Bar", enabled: true }],
+ onChange,
+ });
+ // just call handleSortEnd directly for now as it's difficult to simulate drag and drop
+ setting.instance().handleSortEnd({ oldIndex: 1, newIndex: 0 });
+ expect(onChange.mock.calls).toEqual([
+ [[{ name: "Bar", enabled: true }, { name: "Foo", enabled: true }]],
+ ]);
+ });
+
+ describe("for structured queries", () => {
+ it("should list and add additional columns", () => {
+ const onChange = jest.fn();
+ const addField = jest.fn();
+ const setting = renderChartSettingOrderedColumns({
+ value: [],
+ columns: [],
+ question,
+ onChange,
+ addField,
+ });
+ expect(setting.find(".Icon-add")).toHaveLength(28);
+ setting
+ .find(".Icon-add")
+ .first()
+ .simulate("click");
+ expect(addField.mock.calls).toEqual([[["field-id", 1]]]);
+ expect(onChange.mock.calls).toEqual([
+ [[{ fieldRef: ["field-id", 1], enabled: true }]],
+ ]);
+ });
+ });
+});
diff --git a/frontend/test/visualizations/components/settings/ChartSettingOrderedFields.unit.spec.js b/frontend/test/visualizations/components/settings/ChartSettingOrderedFields.unit.spec.js
deleted file mode 100644
index 84cc84d4e8208..0000000000000
--- a/frontend/test/visualizations/components/settings/ChartSettingOrderedFields.unit.spec.js
+++ /dev/null
@@ -1,79 +0,0 @@
-import React from "react";
-
-import ChartSettingOrderedFields from "metabase/visualizations/components/settings/ChartSettingOrderedFields";
-
-import { mount } from "enzyme";
-
-function renderChartSettingOrderedFields(props) {
-  return mount(<ChartSettingOrderedFields onChange={() => {}} {...props} />);
-}
-
-describe("ChartSettingOrderedFields", () => {
- describe("isAnySelected", () => {
- describe("when on or more fields are enabled", () => {
- it("should be true", () => {
- const chartSettings = renderChartSettingOrderedFields({
- columnNames: { id: "ID", text: "Text" },
- value: [
- { name: "id", enabled: true },
- { name: "text", enabled: false },
- ],
- });
- expect(chartSettings.instance().isAnySelected()).toEqual(true);
- });
- });
-
- describe("when no fields are enabled", () => {
- it("should be false", () => {
- const chartSettings = renderChartSettingOrderedFields({
- columnNames: { id: "ID", text: "Text" },
- value: [
- { name: "id", enabled: false },
- { name: "text", enabled: false },
- ],
- });
- expect(chartSettings.instance().isAnySelected()).toEqual(false);
- });
- });
- });
-
- describe("toggleAll", () => {
- describe("when passed false", () => {
- it("should mark all fields as enabled", () => {
- const onChange = jest.fn();
- const chartSettings = renderChartSettingOrderedFields({
- columnNames: { id: "ID", text: "Text" },
- value: [
- { name: "id", enabled: false },
- { name: "text", enabled: false },
- ],
- onChange,
- });
- chartSettings.instance().handleToggleAll(false);
- expect(onChange.mock.calls[0][0]).toEqual([
- { name: "id", enabled: true },
- { name: "text", enabled: true },
- ]);
- });
- });
-
- describe("when passed true", () => {
- it("should mark all fields as disabled", () => {
- const onChange = jest.fn();
- const chartSettings = renderChartSettingOrderedFields({
- columnNames: { id: "ID", text: "Text" },
- value: [
- { name: "id", enabled: true },
- { name: "text", enabled: true },
- ],
- onChange,
- });
- chartSettings.instance().handleToggleAll(true);
- expect(onChange.mock.calls[0][0]).toEqual([
- { name: "id", enabled: false },
- { name: "text", enabled: false },
- ]);
- });
- });
- });
-});
diff --git a/project.clj b/project.clj
index 76ed1ee742832..55620b3cbf128 100644
--- a/project.clj
+++ b/project.clj
@@ -83,7 +83,7 @@
[net.sf.cssbox/cssbox "4.12" ; HTML / CSS rendering
:exclusions [org.slf4j/slf4j-api]]
[org.clojars.pntblnk/clj-ldap "0.0.12"] ; LDAP client
- [org.liquibase/liquibase-core "3.5.3"] ; migration management (Java lib)
+ [org.liquibase/liquibase-core "3.6.2"] ; migration management (Java lib)
[org.postgresql/postgresql "42.2.2"] ; Postgres driver
[org.slf4j/slf4j-log4j12 "1.7.25"] ; abstraction for logging frameworks -- allows end user to plug in desired logging framework at deployment time
[org.tcrawley/dynapath "0.2.5"] ; Dynamically add Jars (e.g. Oracle or Vertica) to classpath
@@ -96,7 +96,7 @@
[ring/ring-jetty-adapter "1.6.0"] ; Ring adapter using Jetty webserver (used to run a Ring server for unit tests)
[ring/ring-json "0.4.0"] ; Ring middleware for reading/writing JSON automatically
[stencil "0.5.0"] ; Mustache templates for Clojure
- [toucan "1.1.7" ; Model layer, hydration, and DB utilities
+ [toucan "1.1.9" ; Model layer, hydration, and DB utilities
:exclusions [honeysql]]]
:repositories [["bintray" "https://dl.bintray.com/crate/crate"] ; Repo for Crate JDBC driver
["redshift" "https://s3.amazonaws.com/redshift-driver-downloads"]]
diff --git a/resources/automagic_dashboards/field/Country.yaml b/resources/automagic_dashboards/field/Country.yaml
index fa493bb6e66d2..9cd91ba6b52cd 100644
--- a/resources/automagic_dashboards/field/Country.yaml
+++ b/resources/automagic_dashboards/field/Country.yaml
@@ -38,7 +38,7 @@ groups:
- Overview:
title: Overview
- Breakdowns:
- title: How [[this]] are distributed
+ title: How the [[this]] is distributed
cards:
- Count:
title: Count
@@ -60,7 +60,7 @@ cards:
group: Overview
width: 6
- Distribution:
- title: Distribution of [[this]]
+ title: How the [[this]] is distributed
visualization:
map:
map.type: region
diff --git a/resources/automagic_dashboards/field/DateTime.yaml b/resources/automagic_dashboards/field/DateTime.yaml
index 8af35916858fe..346f802e8882b 100644
--- a/resources/automagic_dashboards/field/DateTime.yaml
+++ b/resources/automagic_dashboards/field/DateTime.yaml
@@ -32,9 +32,9 @@ groups:
- Overview:
title: Overview
- Breakdowns:
- title: How [[this]] is distributed
+ title: How the [[this]] is distributed
- Seasonality:
- title: Seasonal patterns in [[this]]
+ title: Seasonal patterns in the [[this]]
cards:
- Count:
title: Count
diff --git a/resources/automagic_dashboards/field/GenericField.yaml b/resources/automagic_dashboards/field/GenericField.yaml
index 71d7143394f25..fe5396453e26c 100644
--- a/resources/automagic_dashboards/field/GenericField.yaml
+++ b/resources/automagic_dashboards/field/GenericField.yaml
@@ -38,7 +38,7 @@ groups:
- Overview:
title: Overview
- Breakdowns:
- title: How [[this]] are distributed
+ title: How the [[this]] is distributed
cards:
- Count:
title: Count
@@ -60,7 +60,7 @@ cards:
group: Overview
width: 6
- Distribution:
- title: How [[this]] is distributed
+ title: How the [[this]] is distributed
visualization: bar
metrics: Count
dimensions: this
@@ -68,12 +68,13 @@ cards:
width: 12
- ByNumber:
title: "[[GenericNumber]] by [[this]]"
- visualization: line
+ visualization: bar
metrics:
- Sum
- Avg
dimensions: this
group: Breakdowns
+ height: 8
- Crosstab:
title: "[[this]] by [[GenericCategoryMedium]]"
visualization: table
diff --git a/resources/automagic_dashboards/field/Number.yaml b/resources/automagic_dashboards/field/Number.yaml
index 937e04ec5c0df..01a3864a03780 100644
--- a/resources/automagic_dashboards/field/Number.yaml
+++ b/resources/automagic_dashboards/field/Number.yaml
@@ -38,11 +38,11 @@ groups:
- Overview:
title: Overview
- Breakdowns:
- title: How [[this]] is distributed across categories
+ title: How the [[this]] is distributed across categories
- Seasonality:
- title: How [[this]] changes with time
+ title: How the [[this]] changes with time
- Geographical:
- title: How [[this]] is distributed geographically
+ title: How the [[this]] is distributed geographically
cards:
- Count:
title: Count
@@ -75,7 +75,7 @@ cards:
group: Overview
width: 9
- Distribution:
- title: How [[this]] is distributed
+ title: How the [[this]] is distributed
visualization: bar
metrics: Count
dimensions:
diff --git a/resources/automagic_dashboards/field/State.yaml b/resources/automagic_dashboards/field/State.yaml
index 2df61dca59ed4..f6c19178b715e 100644
--- a/resources/automagic_dashboards/field/State.yaml
+++ b/resources/automagic_dashboards/field/State.yaml
@@ -38,7 +38,7 @@ groups:
- Overview:
title: Overview
- Breakdowns:
- title: How [[this]] are distributed
+ title: How the [[this]] is distributed
cards:
- Count:
title: Count
diff --git a/resources/automagic_dashboards/metric/GenericMetric.yaml b/resources/automagic_dashboards/metric/GenericMetric.yaml
index 46287df773328..f76fa1582a60e 100644
--- a/resources/automagic_dashboards/metric/GenericMetric.yaml
+++ b/resources/automagic_dashboards/metric/GenericMetric.yaml
@@ -1,5 +1,5 @@
-title: A look at your [[this]]
-transient_title: Here's a quick look at your [[this]]
+title: A look at the [[this]]
+transient_title: Here's a quick look at the [[this]]
description: How it's distributed across time and other categories.
applies_to: GenericTable
metrics:
@@ -35,15 +35,17 @@ dimensions:
field_type: GenericTable.ZipCode
groups:
- Periodicity:
- title: "[[this]] over time"
+ title: The [[this]] over time
- Geographical:
- title: "[[this]] by location"
+ title: The [[this]] by location
- Categories:
- title: How [[this]] is distributed across different categories
+ title: How this metric is distributed across different categories
- Numbers:
- title: How [[this]] is distributed across different numbers
- - LargeCategories:
- title: Top and bottom [[this]]
+ title: How this metric is distributed across different numbers
+ - LargeCategoriesTop:
+ title: Top 5 per category
+ - LargeCategoriesBottom:
+ title: Bottom 5 per category
dashboard_filters:
- Timestamp
- State
@@ -119,9 +121,11 @@ cards:
map.region: us_states
- ByNumber:
group: Numbers
- title: How [[this]] is distributed across [[GenericNumber]]
+ title: "[[this]] by [[GenericNumber]]"
metrics: this
- dimensions: GenericNumber
+ dimensions:
+ - GenericNumber:
+ aggregation: default
visualization: bar
- ByCategoryMedium:
group: Categories
@@ -151,7 +155,7 @@ cards:
order_by:
this: descending
- ByCategoryLargeTop:
- group: LargeCategories
+ group: LargeCategoriesTop
title: "[[this]] per [[GenericCategoryLarge]], top 5"
metrics: this
dimensions: GenericCategoryLarge
@@ -160,7 +164,7 @@ cards:
this: descending
limit: 5
- ByCategoryLargeBottom:
- group: LargeCategories
+ group: LargeCategoriesBottom
title: "[[this]] per [[GenericCategoryLarge]], bottom 5"
metrics: this
dimensions: GenericCategoryLarge
diff --git a/resources/automagic_dashboards/question/GenericQuestion.yaml b/resources/automagic_dashboards/question/GenericQuestion.yaml
index a7d0cfaa713b0..ad7a7b8b067df 100644
--- a/resources/automagic_dashboards/question/GenericQuestion.yaml
+++ b/resources/automagic_dashboards/question/GenericQuestion.yaml
@@ -1,4 +1,4 @@
title: "A closer look at your [[this]]"
transient_title: "Here's a closer look at your [[this]]"
-description: Here is breakdown of metrics and dimensions used in [[this]].
-applies_to: GenericTable
\ No newline at end of file
+description: A closer look at the metrics and dimensions used in this saved question.
+applies_to: GenericTable
diff --git a/resources/automagic_dashboards/table/GenericTable.yaml b/resources/automagic_dashboards/table/GenericTable.yaml
index cb9744168ce42..d06fc4986ef3c 100644
--- a/resources/automagic_dashboards/table/GenericTable.yaml
+++ b/resources/automagic_dashboards/table/GenericTable.yaml
@@ -20,10 +20,6 @@ dimensions:
- Source:
field_type: GenericTable.Source
score: 100
- - GenericCategorySmall:
- field_type: GenericTable.Category
- score: 80
- max_cardinality: 5
- GenericCategoryMedium:
field_type: GenericTable.Category
score: 75
@@ -91,13 +87,13 @@ groups:
- Overview:
title: Summary
- Singletons:
- title: These are the same for all your [[this]]
+ title: These are the same for all your [[this.short-name]]
- ByTime:
- title: "[[this]] across time"
+ title: "These [[this.short-name]] across time"
- Geographical:
- title: Where your [[this]] are
+ title: Where your [[this.short-name]] are
- General:
- title: How [[this]] are distributed
+ title: How these [[this.short-name]] are distributed
dashboard_filters:
- Timestamp
- Date
@@ -112,13 +108,13 @@ dashboard_filters:
cards:
# Overview
- Rowcount:
- title: Total [[this]]
+ title: Total [[this.short-name]]
visualization: scalar
metrics: Count
score: 100
group: Overview
- RowcountLast30Days:
- title: New [[this]] in the last 30 days
+ title: "[[this.short-name]] added in the last 30 days"
visualization: scalar
metrics: Count
score: 100
@@ -132,7 +128,7 @@ cards:
group: Overview
# General
- NumberDistribution:
- title: How [[this]] are distributed across [[GenericNumber]]
+ title: "[[this.short-name]] by [[GenericNumber]]"
dimensions:
- GenericNumber:
aggregation: default
@@ -141,7 +137,7 @@ cards:
score: 90
group: General
- CountByCategoryMedium:
- title: "[[this]] per [[GenericCategoryMedium]]"
+ title: "[[this.short-name]] per [[GenericCategoryMedium]]"
dimensions: GenericCategoryMedium
metrics: Count
visualization: row
@@ -151,7 +147,7 @@ cards:
order_by:
- Count: descending
- CountByCategoryLarge:
- title: "[[this]] per [[GenericCategoryLarge]]"
+ title: "[[this.short-name]] per [[GenericCategoryLarge]]"
dimensions: GenericCategoryLarge
metrics: Count
visualization: table
@@ -162,7 +158,7 @@ cards:
- Count: descending
# Geographical
- CountByCountry:
- title: "[[this]] per country"
+ title: "[[this.short-name]] per country"
metrics: Count
dimensions: Country
score: 90
@@ -173,7 +169,7 @@ cards:
group: Geographical
height: 6
- CountByState:
- title: "[[this]] per state"
+ title: "[[this.short-name]] per state"
metrics: Count
dimensions: State
score: 90
@@ -184,7 +180,7 @@ cards:
group: Geographical
height: 6
- CountByCoords:
- title: "[[this]] by coordinates"
+ title: "[[this.short-name]] by coordinates"
metrics: Count
dimensions:
- Long
@@ -195,42 +191,42 @@ cards:
height: 6
# By Time
- CountByJoinDate:
- title: "[[this]] that have joined over time"
+ title: "[[this.short-name]] that have joined over time"
visualization: line
dimensions: JoinTimestamp
metrics: Count
score: 90
group: ByTime
- CountByJoinDate:
- title: "[[this]] that have joined over time"
+ title: "[[this.short-name]] that have joined over time"
visualization: line
dimensions: JoinDate
metrics: Count
score: 90
group: ByTime
- CountByCreateDate:
- title: New [[this]] over time
+ title: New [[this.short-name]] over time
visualization: line
dimensions: CreateTimestamp
metrics: Count
score: 90
group: ByTime
- CountByCreateDate:
- title: New [[this]] over time
+ title: New [[this.short-name]] over time
visualization: line
dimensions: CreateDate
metrics: Count
score: 90
group: ByTime
- CountByTimestamp:
- title: "[[this]] by [[Timestamp]]"
+ title: "[[this.short-name]] by [[Timestamp]]"
visualization: line
dimensions: Timestamp
metrics: Count
score: 20
group: ByTime
- CountByTimestamp:
- title: "[[this]] by [[Timestamp]]"
+ title: "[[this.short-name]] by [[Timestamp]]"
visualization: line
dimensions: Date
metrics: Count
@@ -371,7 +367,7 @@ cards:
group: ByTime
x_label: "[[Timestamp]]"
- DayOfWeekCreateDate:
- title: Weekdays when new [[this]] were added
+ title: Weekdays when [[this.short-name]] were added
visualization: bar
dimensions:
- CreateTimestamp:
@@ -381,7 +377,7 @@ cards:
group: ByTime
x_label: Created At by day of the week
- DayOfWeekCreateDate:
- title: Weekdays when new [[this]] were added
+ title: Weekdays when [[this.short-name]] were added
visualization: bar
dimensions:
- CreateDate:
@@ -391,7 +387,7 @@ cards:
group: ByTime
x_label: Created At by day of the week
- HourOfDayCreateDate:
- title: Hours when new [[this]] were added
+ title: Hours when [[this.short-name]] were added
visualization: bar
dimensions:
- CreateTimestamp:
@@ -401,7 +397,7 @@ cards:
group: ByTime
x_label: Created At by hour of the day
- HourOfDayCreateDate:
- title: Hours when new [[this]] were added
+ title: Hours when [[this.short-name]] were added
visualization: bar
dimensions:
- CreateTime:
@@ -411,7 +407,7 @@ cards:
group: ByTime
x_label: Created At by hour of the day
- DayOfMonthCreateDate:
- title: Days when new [[this]] were added
+ title: Days when [[this.short-name]] were added
visualization: bar
dimensions:
- CreateTimestamp:
@@ -421,7 +417,7 @@ cards:
group: ByTime
x_label: Created At by day of the month
- DayOfMonthCreateDate:
- title: Days when new [[this]] were added
+ title: Days when [[this.short-name]] were added
visualization: bar
dimensions:
- CreateDate:
@@ -431,7 +427,7 @@ cards:
group: ByTime
x_label: Created At by day of the month
- MonthOfYearCreateDate:
- title: Months when new [[this]] were added
+ title: Months when [[this.short-name]] were added
visualization: bar
dimensions:
- CreateTimestamp:
@@ -441,7 +437,7 @@ cards:
group: ByTime
x_label: Created At by month of the year
- MonthOfYearCreateDate:
- title: Months when new [[this]] were added
+ title: Months when [[this.short-name]] were added
visualization: bar
dimensions:
- CreateDate:
@@ -451,7 +447,7 @@ cards:
group: ByTime
x_label: Created At by month of the year
- QuerterOfYearCreateDate:
- title: Quarters when new [[this]] were added
+ title: Quarters when [[this.short-name]] were added
visualization: bar
dimensions:
- CreateTimestamp:
@@ -461,7 +457,7 @@ cards:
group: ByTime
x_label: Created At by quarter of the year
- QuerterOfYearCreateDate:
- title: Quarters when new [[this]] were added
+ title: Quarters when [[this.short-name]] were added
visualization: bar
dimensions:
- CreateDate:
@@ -471,7 +467,7 @@ cards:
group: ByTime
x_label: Created At by quarter of the year
- DayOfWeekJoinDate:
- title: Weekdays when [[this]] joined
+ title: Weekdays when [[this.short-name]] joined
visualization: bar
dimensions:
- JoinTimestamp:
@@ -481,7 +477,7 @@ cards:
group: ByTime
x_label: Join date by day of the week
- DayOfWeekJoinDate:
- title: Weekdays when [[this]] joined
+ title: Weekdays when [[this.short-name]] joined
visualization: bar
dimensions:
- JoinDate:
@@ -491,7 +487,7 @@ cards:
group: ByTime
x_label: Join date by day of the week
- HourOfDayJoinDate:
- title: Hours when [[this]] joined
+ title: Hours when [[this.short-name]] joined
visualization: bar
dimensions:
- JoinTimestamp:
@@ -501,7 +497,7 @@ cards:
group: ByTime
x_label: Join date by hour of the day
- HourOfDayJoinDate:
- title: Hours when [[this]] joined
+ title: Hours when [[this.short-name]] joined
visualization: bar
dimensions:
- JoinTime:
@@ -511,7 +507,7 @@ cards:
group: ByTime
x_label: Join date by hour of the day
- DayOfMonthJoinDate:
- title: Days of the month when [[this]] joined
+ title: Days of the month when [[this.short-name]] joined
visualization: bar
dimensions:
- JoinTimestamp:
@@ -521,7 +517,7 @@ cards:
group: ByTime
x_label: Join date by day of the month
- DayOfMonthJoinDate:
- title: Days of the month when [[this]] joined
+ title: Days of the month when [[this.short-name]] joined
visualization: bar
dimensions:
- JoinDate:
@@ -531,7 +527,7 @@ cards:
group: ByTime
x_label: Join date by day of the month
- MonthOfYearJoinDate:
- title: Months when [[this]] joined
+ title: Months when [[this.short-name]] joined
visualization: bar
dimensions:
- JoinTimestamp:
@@ -541,7 +537,7 @@ cards:
group: ByTime
x_label: Join date by month of the year
- MonthOfYearJoinDate:
- title: Months when [[this]] joined
+ title: Months when [[this.short-name]] joined
visualization: bar
dimensions:
- JoinDate:
@@ -551,7 +547,7 @@ cards:
group: ByTime
x_label: Join date by month of the year
- QuerterOfYearJoinDate:
- title: Quarters when [[this]] joined
+ title: Quarters when [[this.short-name]] joined
visualization: bar
dimensions:
- JoinTimestamp:
@@ -561,7 +557,7 @@ cards:
group: ByTime
x_label: Join date by quarter of the year
- QuerterOfYearJoinDate:
- title: Quarters when [[this]] joined
+ title: Quarters when [[this.short-name]] joined
visualization: bar
dimensions:
- JoinDate:
diff --git a/resources/automagic_dashboards/table/UserTable.yaml b/resources/automagic_dashboards/table/UserTable.yaml
index 1844a20527cb0..b6297d02f8361 100644
--- a/resources/automagic_dashboards/table/UserTable.yaml
+++ b/resources/automagic_dashboards/table/UserTable.yaml
@@ -23,14 +23,11 @@ dimensions:
- JoinDate:
field_type: Date
score: 30
-- Source:
- field_type: Source
- GenericNumber:
field_type: GenericTable.Number
score: 80
- Source:
field_type: GenericTable.Source
- score: 100
- GenericCategoryMedium:
field_type: GenericTable.Category
score: 75
@@ -54,9 +51,9 @@ groups:
- Overview:
title: Overview
- Geographical:
- title: Where these [[this]] are
+ title: Where these [[this.short-name]] are
- General:
- title: How these [[this]] are distributed
+ title: How these [[this.short-name]] are distributed
dashboard_filters:
- JoinDate
- GenericCategoryMedium
@@ -66,7 +63,7 @@ dashboard_filters:
cards:
# Overview
- Rowcount:
- title: Total [[this]]
+ title: Total [[this.short-name]]
visualization: scalar
metrics: Count
score: 100
@@ -74,7 +71,7 @@ cards:
width: 5
height: 3
- RowcountLast30Days:
- title: New [[this]] in the last 30 days
+ title: New [[this.short-name]] in the last 30 days
visualization: scalar
metrics: Count
score: 100
@@ -84,8 +81,7 @@ cards:
height: 3
- NewUsersByMonth:
visualization: line
- title: New [[this]] per month
- description: The number of new [[this]] each month
+ title: New [[this.short-name]] per month
dimensions: JoinDate
metrics: Count
score: 100
@@ -94,7 +90,7 @@ cards:
height: 7
# Geographical
- CountByCountry:
- title: Number of [[this]] per country
+ title: Per country
metrics: Count
dimensions: Country
score: 90
@@ -104,7 +100,7 @@ cards:
map.region: world_countries
group: Geographical
- CountByState:
- title: "[[this]] per state"
+ title: "Per state"
metrics: Count
dimensions: State
score: 90
@@ -115,7 +111,7 @@ cards:
map.region: us_states
group: Geographical
- CountByCoords:
- title: "[[this]] by coordinates"
+ title: "By coordinates"
metrics: Count
dimensions:
- Long
@@ -135,7 +131,7 @@ cards:
score: 90
group: General
- CountByCategoryMedium:
- title: "[[this]] per [[GenericCategoryMedium]]"
+ title: "Per [[GenericCategoryMedium]]"
dimensions: GenericCategoryMedium
metrics: Count
visualization: row
@@ -144,8 +140,18 @@ cards:
group: General
order_by:
- Count: descending
+ - CountBySource:
+ title: "Per [[Source]]"
+ dimensions: Source
+ metrics: Count
+ visualization: row
+ score: 80
+ height: 8
+ group: General
+ order_by:
+ - Count: descending
- CountByCategoryLarge:
- title: "[[this]] per [[GenericCategoryLarge]]"
+ title: "Per [[GenericCategoryLarge]]"
dimensions: GenericCategoryLarge
metrics: Count
visualization: table
diff --git a/resources/migrations/000_migrations.yaml b/resources/migrations/000_migrations.yaml
index 518b0708b6976..e84372866dbf2 100644
--- a/resources/migrations/000_migrations.yaml
+++ b/resources/migrations/000_migrations.yaml
@@ -1,7 +1,22 @@
databaseChangeLog:
+# The quoting strategy decides when things like column names should be quoted.
+# This is in place to deal with Liquibase not knowing about all of the new
+# reserved words in MySQL 8+. Using a column name that is a reserved word
+# causes a failure. Quoting all objects breaks H2 support though as it will
+# quote table names but not quote that same table name in a foreign key reference
+# which will cause a failure when trying to initially setup the database.
+ - property:
+ name: quote_strategy
+ value: QUOTE_ALL_OBJECTS
+ dbms: mysql,mariadb
+ - property:
+ name: quote_strategy
+ value: LEGACY
+ dbms: postgresql,h2
- changeSet:
id: '1'
author: agilliland
+ objectQuotingStrategy: ${quote_strategy}
changes:
- createTable:
columns:
@@ -1449,6 +1464,8 @@ databaseChangeLog:
- changeSet:
id: 10
author: cammsaul
+ validCheckSum: 7:97fec69516d0dfe424ea7365f51bb87e
+ validCheckSum: 7:3b90e2fe0ac8e617a1f30ef95d39319b
changes:
- createTable:
tableName: revision
@@ -1509,7 +1526,7 @@ databaseChangeLog:
replace: WITHOUT
with: WITH
- modifySql:
- dbms: mysql
+ dbms: mysql,mariadb
replace:
replace: object VARCHAR
with: object TEXT
@@ -1552,6 +1569,8 @@ databaseChangeLog:
- changeSet:
id: 13
author: agilliland
+ validCheckSum: 7:f27286894439bef33ff93761f9b32bc4
+ validCheckSum: 7:1bc8ccc9b1803cda5651f144029be40c
changes:
- createTable:
tableName: activity
@@ -1636,7 +1655,7 @@ databaseChangeLog:
replace: WITHOUT
with: WITH
- modifySql:
- dbms: mysql
+ dbms: mysql,mariadb
replace:
replace: details VARCHAR
with: details TEXT
@@ -1698,6 +1717,7 @@ databaseChangeLog:
- changeSet:
id: 15
author: agilliland
+ objectQuotingStrategy: ${quote_strategy}
changes:
- addColumn:
tableName: revision
@@ -2060,6 +2080,7 @@ databaseChangeLog:
- changeSet:
id: 23
author: agilliland
+ objectQuotingStrategy: ${quote_strategy}
changes:
- modifyDataType:
tableName: metabase_table
@@ -3186,6 +3207,7 @@ databaseChangeLog:
- changeSet:
id: 46
author: camsaul
+ objectQuotingStrategy: ${quote_strategy}
changes:
- addNotNullConstraint:
tableName: report_dashboardcard
@@ -3501,7 +3523,7 @@ databaseChangeLog:
- property:
name: blob.type
value: blob
- dbms: mysql,h2
+ dbms: mysql,h2,mariadb
- property:
name: blob.type
value: bytea
@@ -4272,7 +4294,7 @@ databaseChangeLog:
dbms: postgresql,h2
sql: "COMMENT ON COLUMN collection.name IS 'The user-facing name of this Collection.'"
- sql:
- dbms: mysql
+ dbms: mysql,mariadb
sql: "ALTER TABLE `collection` CHANGE `name` `name` TEXT NOT NULL COMMENT 'The user-facing name of this Collection.'"
# In 0.30.0 we finally removed the long-deprecated native read permissions. Since they're no longer considered valid by
diff --git a/src/metabase/api/collection.clj b/src/metabase/api/collection.clj
index 841142ab9d683..f6b19eb527a4c 100644
--- a/src/metabase/api/collection.clj
+++ b/src/metabase/api/collection.clj
@@ -95,7 +95,7 @@
Works for either a normal Collection or the Root Collection."
[collection :- collection/CollectionWithLocationAndIDOrRoot]
(-> collection
- (hydrate :effective_location :effective_ancestors :can_write)))
+ (hydrate :parent_id :effective_location :effective_ancestors :can_write)))
(s/defn ^:private collection-items
"Return items in the Collection, restricted by `children-options`.
diff --git a/src/metabase/api/session.clj b/src/metabase/api/session.clj
index eeb578f5373db..22e80070ba807 100644
--- a/src/metabase/api/session.clj
+++ b/src/metabase/api/session.clj
@@ -33,7 +33,8 @@
(db/insert! Session
:id <>
:user_id (:id user))
- (events/publish-event! :user-login {:user_id (:id user), :session_id <>, :first_login (not (boolean (:last_login user)))})))
+ (events/publish-event! :user-login
+ {:user_id (:id user), :session_id <>, :first_login (not (boolean (:last_login user)))})))
;;; ## API Endpoints
@@ -53,7 +54,8 @@
(try
(when-let [user-info (ldap/find-user username)]
(when-not (ldap/verify-password user-info password)
- ;; Since LDAP knows about the user, fail here to prevent the local strategy to be tried with a possibly outdated password
+ ;; Since LDAP knows about the user, fail here to prevent the local strategy to be tried with a possibly
+ ;; outdated password
(throw (ex-info password-fail-message
{:status-code 400
:errors {:password password-fail-snippet}})))
@@ -114,7 +116,8 @@
(throttle/check (forgot-password-throttlers :ip-address) remote-address)
(throttle/check (forgot-password-throttlers :email) email)
;; Don't leak whether the account doesn't exist, just pretend everything is ok
- (when-let [{user-id :id, google-auth? :google_auth} (db/select-one ['User :id :google_auth] :email email, :is_active true)]
+ (when-let [{user-id :id, google-auth? :google_auth} (db/select-one ['User :id :google_auth]
+ :email email, :is_active true)]
(let [reset-token (user/set-password-reset-token! user-id)
password-reset-url (str (public-settings/site-url) "/auth/reset_password/" reset-token)]
(email/send-password-reset-email! email google-auth? server-name password-reset-url)
@@ -130,7 +133,8 @@
[^String token]
(when-let [[_ user-id] (re-matches #"(^\d+)_.+$" token)]
(let [user-id (Integer/parseInt user-id)]
- (when-let [{:keys [reset_token reset_triggered], :as user} (db/select-one [User :id :last_login :reset_triggered :reset_token]
+ (when-let [{:keys [reset_token reset_triggered], :as user} (db/select-one [User :id :last_login :reset_triggered
+ :reset_token]
:id user-id, :is_active true)]
;; Make sure the plaintext token matches up with the hashed one for this user
(when (u/ignore-exceptions
@@ -206,8 +210,9 @@
(when-not (autocreate-user-allowed-for-email? email)
;; Use some wacky status code (428 - Precondition Required) so we will know when to so the error screen specific
;; to this situation
- (throw (ex-info (tru "You''ll need an administrator to create a Metabase account before you can use Google to log in.")
- {:status-code 428}))))
+ (throw
+ (ex-info (tru "You''ll need an administrator to create a Metabase account before you can use Google to log in.")
+ {:status-code 428}))))
(s/defn ^:private google-auth-create-new-user!
[{:keys [email] :as new-user} :- user/NewUser]
diff --git a/src/metabase/automagic_dashboards/core.clj b/src/metabase/automagic_dashboards/core.clj
index d0610e428eced..641b40a1239bb 100644
--- a/src/metabase/automagic_dashboards/core.clj
+++ b/src/metabase/automagic_dashboards/core.clj
@@ -32,10 +32,12 @@
[metabase.related :as related]
[metabase.sync.analyze.classify :as classify]
[metabase.util :as u]
+ [metabase.util.date :as date]
[puppetlabs.i18n.core :as i18n :refer [tru trs]]
[ring.util.codec :as codec]
[schema.core :as s]
- [toucan.db :as db]))
+ [toucan.db :as db])
+ (:import java.util.TimeZone))
(def ^:private public-endpoint "/auto/dashboard/")
@@ -46,57 +48,80 @@
[root id-or-name]
(if (->> root :source (instance? (type Table)))
(Field id-or-name)
- (let [field (->> root
+ (when-let [field (->> root
:source
:result_metadata
- (some (comp #{id-or-name} :name)))]
+ (m/find-first (comp #{id-or-name} :name)))]
(-> field
(update :base_type keyword)
(update :special_type keyword)
field/map->FieldInstance
(classify/run-classifiers {})))))
-(defn- metric->description
- [root metric]
- (let [aggregation-clause (-> metric :definition :aggregation first)
- field (some->> aggregation-clause
- second
- filters/field-reference->id
- (->field root))]
- (if field
- (tru "{0} of {1}" (-> aggregation-clause first name str/capitalize) (:display_name field))
- (-> aggregation-clause first name str/capitalize))))
+(def ^:private ^{:arglists '([root])} source-name
+ (comp (some-fn :display_name :name) :source))
+
+(def ^:private op->name
+ {:sum (tru "sum")
+ :avg (tru "average")
+   :min      (tru "minimum")
+ :max (tru "maximum")
+ :count (tru "number")
+ :distinct (tru "distinct count")
+ :stddev (tru "standard deviation")
+ :cum-count (tru "cumulative count")
+ :cum-sum (tru "cumulative sum")})
+
+(def ^:private ^{:arglists '([metric])} saved-metric?
+ (comp #{:metric} qp.util/normalize-token first))
+
+(def ^:private ^{:arglists '([metric])} custom-expression?
+ (comp #{:named} qp.util/normalize-token first))
+
+(def ^:private ^{:arglists '([metric])} adhoc-metric?
+ (complement (some-fn saved-metric? custom-expression?)))
+
+(defn- metric-name
+ [[op & args :as metric]]
+ (cond
+ (adhoc-metric? metric) (-> op qp.util/normalize-token op->name)
+ (saved-metric? metric) (-> args first Metric :name)
+ :else (second args)))
(defn- join-enumeration
- [[x & xs]]
- (if xs
+ [xs]
+ (if (next xs)
(tru "{0} and {1}" (str/join ", " (butlast xs)) (last xs))
- x))
+ (first xs)))
+
+(defn- metric->description
+ [root aggregation-clause]
+ (join-enumeration
+ (for [metric (if (sequential? (first aggregation-clause))
+ aggregation-clause
+ [aggregation-clause])]
+ (if (adhoc-metric? metric)
+ (tru "{0} of {1}" (metric-name metric) (or (some->> metric
+ second
+ filters/field-reference->id
+ (->field root)
+ :display_name)
+ (source-name root)))
+ (metric-name metric)))))
(defn- question-description
[root question]
(let [aggregations (->> (qp.util/get-in-normalized question [:dataset_query :query :aggregation])
- (map (fn [[op arg]]
- (cond
- (-> op qp.util/normalize-token (= :metric))
- (-> arg Metric :name)
-
- arg
- (tru "{0} of {1}" (name op) (->> arg
- filters/field-reference->id
- (->field root)
- :display_name))
-
- :else
- (name op))))
- join-enumeration)
+ (metric->description root))
dimensions (->> (qp.util/get-in-normalized question [:dataset_query :query :breakout])
(mapcat filters/collect-field-references)
(map (comp :display_name
(partial ->field root)
filters/field-reference->id))
join-enumeration)]
- (tru "{0} by {1}" aggregations dimensions)))
+ (if dimensions
+ (tru "{0} by {1}" aggregations dimensions)
+ aggregations)))
(def ^:private ^{:arglists '([x])} encode-base64-json
(comp codec/base64-encode codecs/str->bytes json/encode))
@@ -112,6 +137,7 @@
:full-name (if (isa? (:entity_type table) :entity/GoogleAnalyticsTable)
(:display_name table)
(tru "{0} table" (:display_name table)))
+ :short-name (:display_name table)
:source table
:database (:db_id table)
:url (format "%stable/%s" public-endpoint (u/get-id table))
@@ -121,10 +147,11 @@
[segment]
(let [table (-> segment :table_id Table)]
{:entity segment
- :full-name (tru "{0} segment" (:name segment))
+ :full-name (tru "{0} in the {1} segment" (:display_name table) (:name segment))
+ :short-name (:display_name table)
:source table
:database (:db_id table)
- :query-filter (-> segment :definition :filter)
+ :query-filter [:SEGMENT (u/get-id segment)]
:url (format "%ssegment/%s" public-endpoint (u/get-id segment))
:rules-prefix ["table"]}))
@@ -132,7 +159,10 @@
[metric]
(let [table (-> metric :table_id Table)]
{:entity metric
- :full-name (tru "{0} metric" (:name metric))
+ :full-name (if (:id metric)
+ (tru "{0} metric" (:name metric))
+ (:name metric))
+ :short-name (:name metric)
:source table
:database (:db_id table)
;; We use :id here as it might not be a concrete field but rather one from a nested query which
@@ -145,6 +175,7 @@
(let [table (field/table field)]
{:entity field
:full-name (tru "{0} field" (:display_name field))
+ :short-name (:display_name field)
:source table
:database (:db_id table)
;; We use :id here as it might not be a concrete metric but rather one from a nested query
@@ -176,31 +207,35 @@
source-question
(assoc :entity_type :entity/GenericTable))
(native-query? card) (-> card (assoc :entity_type :entity/GenericTable))
- :else (-> card ((some-fn :table_id :table-id)) Table)))
+ :else (-> card (qp.util/get-normalized :table-id) Table)))
(defmethod ->root (type Card)
[card]
- {:entity card
- :source (source card)
- :database (:database_id card)
- :query-filter (qp.util/get-in-normalized card [:dataset_query :query :filter])
- :full-name (tru "{0} question" (:name card))
- :url (format "%squestion/%s" public-endpoint (u/get-id card))
- :rules-prefix [(if (table-like? card)
- "table"
- "question")]})
+ (let [source (source card)]
+ {:entity card
+ :source source
+ :database (:database_id card)
+ :query-filter (qp.util/get-in-normalized card [:dataset_query :query :filter])
+ :full-name (tru "\"{0}\" question" (:name card))
+ :short-name (source-name {:source source})
+ :url (format "%squestion/%s" public-endpoint (u/get-id card))
+ :rules-prefix [(if (table-like? card)
+ "table"
+ "question")]}))
(defmethod ->root (type Query)
[query]
- (let [source (source query)]
+ (let [source (source query)]
{:entity query
:source source
:database (:database-id query)
+ :query-filter (qp.util/get-in-normalized query [:dataset_query :query :filter])
:full-name (cond
(native-query? query) (tru "Native query")
(table-like? query) (-> source ->root :full-name)
:else (question-description {:source source} query))
- :url (format "%sadhoc/%s" public-endpoint (encode-base64-json query))
+ :short-name (source-name {:source source})
+ :url (format "%sadhoc/%s" public-endpoint (encode-base64-json (:dataset_query query)))
:rules-prefix [(if (table-like? query)
"table"
"question")]}))
@@ -349,8 +384,13 @@
bindings)
(comp first #(filter-tables % tables) rules/->entity)
identity)]
- (str/replace s #"\[\[(\w+)\]\]" (fn [[_ identifier]]
- (->reference template-type (bindings identifier))))))
+ (str/replace s #"\[\[(\w+)(?:\.([\w\-]+))?\]\]"
+ (fn [[_ identifier attribute]]
+ (let [entity (bindings identifier)
+ attribute (some-> attribute qp.util/normalize-token)]
+ (or (and (ifn? entity) (entity attribute))
+ (root attribute)
+ (->reference template-type entity)))))))
(defn- field-candidates
[context {:keys [field_type links_to named max_cardinality] :as constraints}]
@@ -440,9 +480,9 @@
(-> context :source u/get-id)
(->> context :source u/get-id (str "card__")))}
(not-empty filters)
- (assoc :filter (transduce (map :filter)
- merge-filter-clauses
- filters))
+ (assoc :filter (->> filters
+ (map :filter)
+ (apply merge-filter-clauses)))
(not-empty dimensions)
(assoc :breakout dimensions)
@@ -487,21 +527,41 @@
(u/update-when :graph.metrics metric->name)
(u/update-when :graph.dimensions dimension->name))]))
+(defn- capitalize-first
+ [s]
+ (str (str/upper-case (subs s 0 1)) (subs s 1)))
+
(defn- instantiate-metadata
[x context bindings]
(-> (walk/postwalk
(fn [form]
(if (string? form)
- (fill-templates :string context bindings form)
+ (let [new-form (fill-templates :string context bindings form)]
+ (if (not= new-form form)
+ (capitalize-first new-form)
+ new-form))
form))
x)
(u/update-when :visualization #(instantate-visualization % bindings (:metrics context)))))
(defn- valid-breakout-dimension?
- [{:keys [base_type engine] :as f}]
+ [{:keys [base_type engine]}]
(not (and (isa? base_type :type/Number)
(= engine :druid))))
+(defn- singular-cell-dimensions
+ [root]
+ (letfn [(collect-dimensions [[op & args]]
+ (case (some-> op qp.util/normalize-token)
+ :and (mapcat collect-dimensions args)
+ := (filters/collect-field-references args)
+ nil))]
+ (->> root
+ :cell-query
+ collect-dimensions
+ (map filters/field-reference->id)
+ set)))
+
(defn- card-candidates
"Generate all potential cards given a card definition and bindings for
dimensions, metrics, and filters."
@@ -509,8 +569,7 @@
(let [order_by (build-order-by dimensions metrics order_by)
metrics (map (partial get (:metrics context)) metrics)
filters (cond-> (map (partial get (:filters context)) filters)
- (:query-filter context)
- (conj {:filter (:query-filter context)}))
+ (:query-filter context) (conj {:filter (:query-filter context)}))
score (if query
score
(* (or (->> dimensions
@@ -520,33 +579,35 @@
rules/max-score)
(/ score rules/max-score)))
dimensions (map (comp (partial into [:dimension]) first) dimensions)
- used-dimensions (rules/collect-dimensions [dimensions metrics filters query])]
+ used-dimensions (rules/collect-dimensions [dimensions metrics filters query])
+ cell-dimension? (->> context :root singular-cell-dimensions)]
(->> used-dimensions
(map (some-fn #(get-in (:dimensions context) [% :matches])
(comp #(filter-tables % (:tables context)) rules/->entity)))
(apply combo/cartesian-product)
- (filter (fn [instantiations]
+ (map (partial zipmap used-dimensions))
+ (filter (fn [bindings]
(->> dimensions
- (map (comp (zipmap used-dimensions instantiations) second))
- (every? valid-breakout-dimension?))))
- (map (fn [instantiations]
- (let [bindings (zipmap used-dimensions instantiations)
- query (if query
- (build-query context bindings query)
- (build-query context bindings
- filters
- metrics
- dimensions
- limit
- order_by))]
+ (map (comp bindings second))
+ (every? (every-pred valid-breakout-dimension?
+ (complement (comp cell-dimension? id-or-name)))))))
+ (map (fn [bindings]
+ (let [query (if query
+ (build-query context bindings query)
+ (build-query context bindings
+ filters
+ metrics
+ dimensions
+ limit
+ order_by))]
(-> card
- (assoc :metrics metrics)
(instantiate-metadata context (->> metrics
(map :name)
(zipmap (:metrics card))
(merge bindings)))
- (assoc :score score
- :dataset_query query))))))))
+ (assoc :dataset_query query
+ :metrics (map (some-fn :name (comp metric-name :metric)) metrics)
+ :score score))))))))
(defn- matching-rules
"Return matching rules orderd by specificity.
@@ -632,7 +693,7 @@
(update :special_type keyword)
field/map->FieldInstance
(classify/run-classifiers {})
- (map #(assoc % :engine engine)))))
+ (assoc :engine engine))))
constantly))]
(as-> {:source (assoc source :fields (table->fields source))
:root root
@@ -664,8 +725,7 @@
([root rule context]
(-> rule
(select-keys [:title :description :transient_title :groups])
- (instantiate-metadata context {})
- (assoc :refinements (:cell-query root)))))
+ (instantiate-metadata context {}))))
(s/defn ^:private apply-rule
[root, rule :- rules/Rule]
@@ -673,15 +733,16 @@
dashboard (make-dashboard root rule context)
filters (->> rule
:dashboard_filters
- (mapcat (comp :matches (:dimensions context))))
+ (mapcat (comp :matches (:dimensions context)))
+ (remove (comp (singular-cell-dimensions root) id-or-name)))
cards (make-cards context rule)]
(when (or (not-empty cards)
(-> rule :cards nil?))
[(assoc dashboard
- :filters filters
- :cards cards
- :context context)
- rule])))
+ :filters filters
+ :cards cards)
+ rule
+ context])))
(def ^:private ^:const ^Long max-related 6)
(def ^:private ^:const ^Long max-cards 15)
@@ -709,16 +770,15 @@
[root, rule :- (s/maybe rules/Rule)]
(->> (rules/get-rules (concat (:rules-prefix root) [(:rule rule)]))
(keep (fn [indepth]
- (when-let [[dashboard _] (apply-rule root indepth)]
+ (when-let [[dashboard _ _] (apply-rule root indepth)]
{:title ((some-fn :short-title :title) dashboard)
:description (:description dashboard)
:url (format "%s/rule/%s/%s" (:url root) (:rule rule) (:rule indepth))})))
(hash-map :indepth)))
(defn- drilldown-fields
- [dashboard]
- (->> dashboard
- :context
+ [context]
+ (->> context
:dimensions
vals
(mapcat :matches)
@@ -786,47 +846,57 @@
[sideways sideways sideways down down up])})
(s/defn ^:private related
- "Build a balancee list of related X-rays. General composition of the list is determined for each
+ "Build a balanced list of related X-rays. General composition of the list is determined for each
root type individually via `related-selectors`. That recepie is then filled round-robin style."
- [dashboard, rule :- (s/maybe rules/Rule)]
- (let [root (-> dashboard :context :root)]
- (->> (merge (indepth root rule)
- (drilldown-fields dashboard)
- (related-entities root))
- (fill-related max-related (related-selectors (-> root :entity type)))
- (group-by :selector)
- (m/map-vals (partial map :entity)))))
+ [{:keys [root] :as context}, rule :- (s/maybe rules/Rule)]
+ (->> (merge (indepth root rule)
+ (drilldown-fields context)
+ (related-entities root))
+ (fill-related max-related (related-selectors (-> root :entity type)))
+ (group-by :selector)
+ (m/map-vals (partial map :entity))))
+
+(defn- filter-referenced-fields
+  "Return a map of fields referenced in filter clause."
+ [root filter-clause]
+ (->> filter-clause
+ filters/collect-field-references
+ (mapcat (fn [[_ & ids]]
+ (for [id ids]
+ [id (->field root id)])))
+ (remove (comp nil? second))
+ (into {})))
(defn- automagic-dashboard
"Create dashboards for table `root` using the best matching heuristics."
- [{:keys [rule show rules-prefix query-filter cell-query full-name] :as root}]
- (if-let [[dashboard rule] (if rule
- (apply-rule root (rules/get-rule rule))
- (->> root
- (matching-rules (rules/get-rules rules-prefix))
- (keep (partial apply-rule root))
- ;; `matching-rules` returns an `ArraySeq` (via `sort-by`) so
- ;; `first` realises one element at a time (no chunking).
- first))]
- (do
+ [{:keys [rule show rules-prefix full-name] :as root}]
+ (if-let [[dashboard rule context] (if rule
+ (apply-rule root (rules/get-rule rule))
+ (->> root
+ (matching-rules (rules/get-rules rules-prefix))
+ (keep (partial apply-rule root))
+ ;; `matching-rules` returns an `ArraySeq` (via `sort-by`)
+ ;; so `first` realises one element at a time
+ ;; (no chunking).
+ first))]
+ (let [show (or show max-cards)]
(log/infof (trs "Applying heuristic %s to %s.") (:rule rule) full-name)
(log/infof (trs "Dimensions bindings:\n%s")
- (->> dashboard
- :context
+ (->> context
:dimensions
(m/map-vals #(update % :matches (partial map :name)))
u/pprint-to-str))
(log/infof (trs "Using definitions:\nMetrics:\n%s\nFilters:\n%s")
- (-> dashboard :context :metrics u/pprint-to-str)
- (-> dashboard :context :filters u/pprint-to-str))
- (-> (cond-> dashboard
- (or query-filter cell-query)
- (assoc :title (tru "A closer look at {0}" full-name)))
- (populate/create-dashboard (or show max-cards))
- (assoc :related (related dashboard rule))
- (assoc :more (when (and (-> dashboard :cards count (> max-cards))
- (not= show :all))
- (format "%s#show=all" (:url root))))))
+ (-> context :metrics u/pprint-to-str)
+ (-> context :filters u/pprint-to-str))
+ (-> dashboard
+ (populate/create-dashboard show)
+ (assoc :related (related context rule)
+ :more (when (and (not= show :all)
+ (-> dashboard :cards count (> show)))
+ (format "%s#show=all" (:url root)))
+ :transient_filters (:query-filter context)
+ :param_fields (->> context :query-filter (filter-referenced-fields root)))))
(throw (ex-info (trs "Can''t create dashboard for {0}" full-name)
{:root root
:available-rules (map :rule (or (some-> rule rules/get-rule vector)
@@ -858,11 +928,11 @@
qp.util/normalize-token
(= :metric))
(-> aggregation-clause second Metric)
- (let [metric (metric/map->MetricInstance
- {:definition {:aggregation [aggregation-clause]
- :source_table (:table_id question)}
- :table_id (:table_id question)})]
- (assoc metric :name (metric->description root metric)))))
+ (let [table-id (qp.util/get-normalized question :table-id)]
+ (metric/map->MetricInstance {:definition {:aggregation [aggregation-clause]
+ :source_table table-id}
+ :name (metric->description root aggregation-clause)
+ :table_id table-id}))))
(qp.util/get-in-normalized question [:dataset_query :query :aggregation])))
(defn- collect-breakout-fields
@@ -876,48 +946,158 @@
(defn- decompose-question
[root question opts]
(map #(automagic-analysis % (assoc opts
- :source (:source root)
- :database (:database root)))
+ :source (:source root)
+ :query-filter (:query-filter root)
+ :database (:database root)))
(concat (collect-metrics root question)
(collect-breakout-fields root question))))
+(defn- pluralize
+ [x]
+ (case (mod x 10)
+ 1 (tru "{0}st" x)
+ 2 (tru "{0}nd" x)
+ 3 (tru "{0}rd" x)
+ (tru "{0}th" x)))
+
+(defn- humanize-datetime
+ [dt unit]
+ (let [dt (date/str->date-time dt)
+ tz (.getID ^TimeZone @date/jvm-timezone)
+ unparse-with-formatter (fn [formatter dt]
+ (t.format/unparse
+ (t.format/formatter formatter (t/time-zone-for-id tz))
+ dt))]
+ (case unit
+ :minute (tru "at {0}" (unparse-with-formatter "h:mm a, MMMM d, YYYY" dt))
+ :hour (tru "at {0}" (unparse-with-formatter "h a, MMMM d, YYYY" dt))
+ :day (tru "on {0}" (unparse-with-formatter "MMMM d, YYYY" dt))
+ :week (tru "in {0} week - {1}"
+ (pluralize (date/date-extract :week-of-year dt tz))
+ (str (date/date-extract :year dt tz)))
+ :month (tru "in {0}" (unparse-with-formatter "MMMM YYYY" dt))
+ :quarter (tru "in Q{0} - {1}"
+ (date/date-extract :quarter-of-year dt tz)
+ (str (date/date-extract :year dt tz)))
+ :year (unparse-with-formatter "YYYY" dt)
+ :day-of-week (unparse-with-formatter "EEEE" dt)
+ :hour-of-day (tru "at {0}" (unparse-with-formatter "h a" dt))
+ :month-of-year (unparse-with-formatter "MMMM" dt)
+ :quarter-of-year (tru "Q{0}" (date/date-extract :quarter-of-year dt tz))
+ (:minute-of-hour
+ :day-of-month
+ :day-of-year
+ :week-of-year) (date/date-extract unit dt tz))))
+
+(defn- field-reference->field
+ [root field-reference]
+ (cond-> (->> field-reference
+ filters/collect-field-references
+ first
+ filters/field-reference->id
+ (->field root))
+ (-> field-reference first qp.util/normalize-token (= :datetime-field))
+ (assoc :unit (-> field-reference last qp.util/normalize-token))))
+
+(defmulti
+ ^{:private true
+ :arglists '([fieldset [op & args]])}
+ humanize-filter-value (fn [_ [op & args]]
+ (qp.util/normalize-token op)))
+
+(def ^:private unit-name (comp {:minute-of-hour "minute"
+ :hour-of-day "hour"
+ :day-of-week "day of week"
+ :day-of-month "day of month"
+ :day-of-year "day of year"
+ :week-of-year "week"
+ :month-of-year "month"
+ :quarter-of-year "quarter"}
+ qp.util/normalize-token))
+
+(defn- field-name
+ ([root field-reference]
+ (->> field-reference (field-reference->field root) field-name))
+ ([{:keys [display_name unit] :as field}]
+ (cond->> display_name
+ (and (filters/periodic-datetime? field) unit) (format "%s of %s" (unit-name unit)))))
+
+(defmethod humanize-filter-value :=
+ [root [_ field-reference value]]
+ (let [field (field-reference->field root field-reference)
+ field-name (field-name field)]
+ (if (or (filters/datetime? field)
+ (filters/periodic-datetime? field))
+ (tru "{0} is {1}" field-name (humanize-datetime value (:unit field)))
+ (tru "{0} is {1}" field-name value))))
+
+(defmethod humanize-filter-value :between
+ [root [_ field-reference min-value max-value]]
+ (tru "{0} is between {1} and {2}" (field-name root field-reference) min-value max-value))
+
+(defmethod humanize-filter-value :inside
+ [root [_ lat-reference lon-reference lat-max lon-min lat-min lon-max]]
+ (tru "{0} is between {1} and {2}; and {3} is between {4} and {5}"
+ (field-name root lon-reference) lon-min lon-max
+ (field-name root lat-reference) lat-min lat-max))
+
+(defmethod humanize-filter-value :and
+ [root [_ & clauses]]
+ (->> clauses
+ (map (partial humanize-filter-value root))
+ join-enumeration))
+
+(defn- cell-title
+ [root cell-query]
+ (str/join " " [(->> (qp.util/get-in-normalized (-> root :entity) [:dataset_query :query :aggregation])
+ (metric->description root))
+ (tru "where {0}" (humanize-filter-value root cell-query))]))
+
(defmethod automagic-analysis (type Card)
[card {:keys [cell-query] :as opts}]
- (let [root (->root card)]
- (if (or (table-like? card)
- cell-query)
+ (let [root (->root card)
+ cell-url (format "%squestion/%s/cell/%s" public-endpoint
+ (u/get-id card)
+ (encode-base64-json cell-query))]
+ (if (table-like? card)
(automagic-dashboard
(merge (cond-> root
- cell-query (merge {:url (format "%squestion/%s/cell/%s" public-endpoint
- (u/get-id card)
- (encode-base64-json cell-query))
+ cell-query (merge {:url cell-url
:entity (:source root)
:rules-prefix ["table"]}))
opts))
(let [opts (assoc opts :show :all)]
- (->> (decompose-question root card opts)
- (apply populate/merge-dashboards (automagic-dashboard root))
- (merge {:related (related {:context {:root {:entity card}}} nil)}))))))
+ (cond-> (apply populate/merge-dashboards
+ (automagic-dashboard (merge (cond-> root
+ cell-query (assoc :url cell-url))
+ opts))
+ (decompose-question root card opts))
+ cell-query (merge (let [title (tru "A closer look at {0}" (cell-title root cell-query))]
+ {:transient_name title
+ :name title})))))))
(defmethod automagic-analysis (type Query)
[query {:keys [cell-query] :as opts}]
- (let [root (->root query)]
- (if (or (table-like? query)
- (:cell-query opts))
+ (let [root (->root query)
+ cell-url (format "%sadhoc/%s/cell/%s" public-endpoint
+ (encode-base64-json (:dataset_query query))
+ (encode-base64-json cell-query))]
+ (if (table-like? query)
(automagic-dashboard
(merge (cond-> root
- cell-query (merge {:url (format "%sadhoc/%s/cell/%s" public-endpoint
- (encode-base64-json (:dataset_query query))
- (encode-base64-json cell-query))
+ cell-query (merge {:url cell-url
:entity (:source root)
:rules-prefix ["table"]}))
- (update opts :cell-query
- (partial filters/inject-refinement
- (qp.util/get-in-normalized query [:dataset_query :query :filter])))))
+ opts))
(let [opts (assoc opts :show :all)]
- (->> (decompose-question root query opts)
- (apply populate/merge-dashboards (automagic-dashboard root))
- (merge {:related (related {:context {:root {:entity query}}} nil)}))))))
+ (cond-> (apply populate/merge-dashboards
+ (automagic-dashboard (merge (cond-> root
+ cell-query (assoc :url cell-url))
+ opts))
+ (decompose-question root query opts))
+ cell-query (merge (let [title (tru "A closer look at the {0}" (cell-title root cell-query))]
+ {:transient_name title
+ :name title})))))))
(defmethod automagic-analysis (type Field)
[field opts]
@@ -930,8 +1110,7 @@
:from [Field]
:where [:in :table_id (map u/get-id tables)]
:group-by [:table_id]})
- (into {} (map (fn [{:keys [count table_id]}]
- [table_id count]))))
+ (into {} (map (juxt :table_id :count))))
list-like? (->> (when-let [candidates (->> field-count
(filter (comp (partial >= 2) val))
(map key)
diff --git a/src/metabase/automagic_dashboards/filters.clj b/src/metabase/automagic_dashboards/filters.clj
index d6d7c5b8c7e63..a560f807175fb 100644
--- a/src/metabase/automagic_dashboards/filters.clj
+++ b/src/metabase/automagic_dashboards/filters.clj
@@ -11,7 +11,7 @@
[(s/one (s/constrained su/KeywordOrString
(comp #{:field-id :fk-> :field-literal} qp.util/normalize-token))
"head")
- s/Any])
+ (s/cond-pre s/Int su/KeywordOrString)])
(def ^:private ^{:arglists '([form])} field-reference?
"Is given form an MBQL field reference?"
@@ -25,7 +25,7 @@
(defmethod field-reference->id :field-id
[[_ id]]
(if (sequential? id)
- (second id)
+ (field-reference->id id)
id))
(defmethod field-reference->id :fk->
@@ -44,12 +44,14 @@
(tree-seq (some-fn sequential? map?) identity)
(filter field-reference?)))
-(def ^:private ^{:arglists '([field])} periodic-datetime?
+(def ^{:arglists '([field])} periodic-datetime?
+ "Is `field` a periodic datetime (eg. day of month)?"
(comp #{:minute-of-hour :hour-of-day :day-of-week :day-of-month :day-of-year :week-of-year
:month-of-year :quarter-of-year}
:unit))
-(defn- datetime?
+(defn datetime?
+ "Is `field` a datetime?"
[field]
(and (not (periodic-datetime? field))
(or (isa? (:base_type field) :type/DateTime)
@@ -154,10 +156,10 @@
remove-unqualified
(sort-by interestingness >)
(take max-filters)
- (map #(assoc % :fk-map (build-fk-map fks %)))
(reduce
(fn [dashboard candidate]
- (let [filter-id (-> candidate hash str)
+ (let [filter-id (-> candidate ((juxt :id :name :unit)) hash str)
+ candidate (assoc candidate :fk-map (build-fk-map fks candidate))
dashcards (:ordered_cards dashboard)
dashcards-new (map #(add-filter % filter-id candidate) dashcards)]
;; Only add filters that apply to all cards.
@@ -172,17 +174,6 @@
dashboard)))))
-(defn filter-referenced-fields
- "Return a map of fields referenced in filter cluase."
- [filter-clause]
- (->> filter-clause
- collect-field-references
- (mapcat (fn [[_ & ids]]
- (for [id ids]
- [id (Field id)])))
- (into {})))
-
-
(defn- flatten-filter-clause
[filter-clause]
(when (not-empty filter-clause)
@@ -208,4 +199,4 @@
(->> filter-clause
flatten-filter-clause
(remove (comp in-refinement? collect-field-references))
- (reduce merge-filter-clauses refinement))))
+ (apply merge-filter-clauses refinement))))
diff --git a/src/metabase/automagic_dashboards/populate.clj b/src/metabase/automagic_dashboards/populate.clj
index fb1c1c048d320..60fd4bce4fa89 100644
--- a/src/metabase/automagic_dashboards/populate.clj
+++ b/src/metabase/automagic_dashboards/populate.clj
@@ -4,10 +4,9 @@
[clojure.tools.logging :as log]
[metabase.api.common :as api]
[metabase.automagic-dashboards.filters :as filters]
- [metabase.models
- [card :as card]
- [field :refer [Field]]]
+ [metabase.models.card :as card]
[metabase.query-processor.util :as qp.util]
+ [metabase.util :as u]
[puppetlabs.i18n.core :as i18n :refer [trs]]
[toucan.db :as db]))
@@ -80,17 +79,17 @@
(defn- visualization-settings
[{:keys [metrics x_label y_label series_labels visualization dimensions] :as card}]
- (let [metric-name (some-fn :name (comp str/capitalize name first :metric))
- [display visualization-settings] visualization]
+ (let [[display visualization-settings] visualization]
{:display display
- :visualization_settings
- (-> visualization-settings
- (merge (colorize card))
- (cond->
- (some :name metrics) (assoc :graph.series_labels (map metric-name metrics))
- series_labels (assoc :graph.series_labels series_labels)
- x_label (assoc :graph.x_axis.title_text x_label)
- y_label (assoc :graph.y_axis.title_text y_label)))}))
+ :visualization_settings (-> visualization-settings
+ (assoc :graph.series_labels metrics)
+ (merge (colorize card))
+ (cond->
+ series_labels (assoc :graph.series_labels series_labels)
+
+ x_label (assoc :graph.x_axis.title_text x_label)
+
+ y_label (assoc :graph.y_axis.title_text y_label)))}))
(defn- add-card
"Add a card to dashboard `dashboard` at position [`x`, `y`]."
@@ -236,15 +235,13 @@
(defn create-dashboard
"Create dashboard and populate it with cards."
([dashboard] (create-dashboard dashboard :all))
- ([{:keys [title transient_title description groups filters cards refinements fieldset]} n]
+ ([{:keys [title transient_title description groups filters cards refinements]} n]
(let [n (cond
(= n :all) (count cards)
(keyword? n) (Integer/parseInt (name n))
:else n)
dashboard {:name title
:transient_name (or transient_title title)
- :transient_filters refinements
- :param_fields (filters/filter-referenced-fields refinements)
:description description
:creator_id api/*current-user-id*
:parameters []}
@@ -265,43 +262,62 @@
(cond-> dashboard
(not-empty filters) (filters/add-filters filters max-filters)))))
+(defn- downsize-titles
+ [markdown]
+ (->> markdown
+ str/split-lines
+ (map (fn [line]
+ (if (str/starts-with? line "#")
+ (str "#" line)
+ line)))
+ str/join))
+
+(defn- merge-filters
+ [ds]
+ (when (->> ds
+ (mapcat :ordered_cards)
+ (keep (comp :table_id :card))
+ distinct
+ count
+ (= 1))
+ [(->> ds (mapcat :parameters) distinct)
+ (->> ds
+ (mapcat :ordered_cards)
+ (mapcat :parameter_mappings)
+ (map #(dissoc % :card_id))
+ distinct)]))
+
(defn merge-dashboards
"Merge dashboards `ds` into dashboard `d`."
- [d & ds]
- (let [filter-targets (when (->> ds
- (mapcat :ordered_cards)
- (keep (comp :table_id :card))
- distinct
- count
- (= 1))
- (->> ds
- (mapcat :ordered_cards)
- (mapcat :parameter_mappings)
- (mapcat (comp filters/collect-field-references :target))
- (map filters/field-reference->id)
- distinct
- (map Field)))]
- (cond-> (reduce
- (fn [target dashboard]
- (let [offset (->> target
- :ordered_cards
- (map #(+ (:row %) (:sizeY %)))
- (apply max -1) ; -1 so it neturalizes +1 for spacing if
- ; the target dashboard is empty.
- inc)]
- (-> target
- (add-text-card {:width grid-width
- :height group-heading-height
- :text (format "# %s" (:name dashboard))
- :visualization-settings {:dashcard.background false
- :text.align_vertical :bottom}}
- [offset 0])
- (update :ordered_cards concat
- (->> dashboard
- :ordered_cards
- (map #(-> %
- (update :row + offset group-heading-height)
- (dissoc :parameter_mappings))))))))
- d
- ds)
- (not-empty filter-targets) (filters/add-filters filter-targets max-filters))))
+ [& ds]
+ (let [[paramters parameter-mappings] (merge-filters ds)]
+ (reduce
+ (fn [target dashboard]
+ (let [offset (->> target
+ :ordered_cards
+ (map #(+ (:row %) (:sizeY %)))
+ (apply max -1) ; -1 so it neturalizes +1 for spacing if
+ ; the target dashboard is empty.
+ inc)
+ cards (->> dashboard
+ :ordered_cards
+ (map #(-> %
+ (update :row + offset group-heading-height)
+ (u/update-in-when [:visualization_settings :text]
+ downsize-titles)
+ (assoc :parameter_mappings
+ (when (:card_id %)
+ (for [mapping parameter-mappings]
+ (assoc mapping :card_id (:card_id %))))))))]
+ (-> target
+ (add-text-card {:width grid-width
+ :height group-heading-height
+ :text (format "# %s" (:name dashboard))
+ :visualization-settings {:dashcard.background false
+ :text.align_vertical :bottom}}
+ [offset 0])
+ (update :ordered_cards concat cards))))
+ (-> ds
+ first
+ (assoc :parameters paramters))
+ (rest ds))))
diff --git a/src/metabase/automagic_dashboards/rules.clj b/src/metabase/automagic_dashboards/rules.clj
index 50bad7029869e..6e6842033f60c 100644
--- a/src/metabase/automagic_dashboards/rules.clj
+++ b/src/metabase/automagic_dashboards/rules.clj
@@ -4,6 +4,7 @@
[clojure.string :as str]
[clojure.tools.logging :as log]
[metabase.automagic-dashboards.populate :as populate]
+ [metabase.query-processor.util :as qp.util]
[metabase.util :as u]
[metabase.util.schema :as su]
[puppetlabs.i18n.core :as i18n :refer [trs]]
@@ -119,8 +120,7 @@
(mapcat (comp k val first) cards))
(def ^:private DimensionForm
- [(s/one (s/constrained (s/cond-pre s/Str s/Keyword)
- (comp #{"dimension"} str/lower-case name))
+ [(s/one (s/constrained (s/cond-pre s/Str s/Keyword) (comp #{:dimension} qp.util/normalize-token))
"dimension")
(s/one s/Str "identifier")
su/Map])
diff --git a/src/metabase/db.clj b/src/metabase/db.clj
index 78110be4483e7..375451011125f 100644
--- a/src/metabase/db.clj
+++ b/src/metabase/db.clj
@@ -1,5 +1,5 @@
(ns metabase.db
- "Database definition and helper functions for interacting with the database."
+ "Application database definition, and setup logic, and helper functions for interacting with it."
(:require [clojure
[string :as s]
[walk :as walk]]
@@ -11,6 +11,7 @@
[config :as config]
[util :as u]]
[metabase.db.spec :as dbspec]
+ [puppetlabs.i18n.core :refer [trs]]
[ring.util.codec :as codec]
[toucan.db :as db])
(:import com.mchange.v2.c3p0.ComboPooledDataSource
@@ -45,8 +46,9 @@
[(System/getProperty "user.dir") "/" db-file-name options]))))))
(defn- parse-connection-string
- "Parse a DB connection URI like `postgres://cam@localhost.com:5432/cams_cool_db?ssl=true&sslfactory=org.postgresql.ssl.NonValidatingFactory`
- and return a broken-out map."
+ "Parse a DB connection URI like
+ `postgres://cam@localhost.com:5432/cams_cool_db?ssl=true&sslfactory=org.postgresql.ssl.NonValidatingFactory` and
+ return a broken-out map."
[uri]
(when-let [[_ protocol user pass host port db query] (re-matches #"^([^:/@]+)://(?:([^:/@]+)(?::([^:@]+))?@)?([^:@]+)(?::(\d+))?/([^/?]+)(?:\?(.*))?$" uri)]
(merge {:type (case (keyword protocol)
@@ -73,8 +75,8 @@
(config/config-kw :mb-db-type)))
(def db-connection-details
- "Connection details that can be used when pretending the Metabase DB is itself a `Database`
- (e.g., to use the Generic SQL driver functions on the Metabase DB itself)."
+ "Connection details that can be used when pretending the Metabase DB is itself a `Database` (e.g., to use the Generic
+ SQL driver functions on the Metabase DB itself)."
(delay (or @connection-string-details
(case (db-type)
:h2 {:type :h2 ; TODO - we probably don't need to specifc `:type` here since we can just call (db-type)
@@ -112,19 +114,19 @@
(def ^:private ^:const ^String changelog-file "liquibase.yaml")
(defn- migrations-sql
- "Return a string of SQL containing the DDL statements needed to perform unrun LIQUIBASE migrations."
+ "Return a string of SQL containing the DDL statements needed to perform unrun `liquibase` migrations."
^String [^Liquibase liquibase]
(let [writer (StringWriter.)]
(.update liquibase "" writer)
(.toString writer)))
(defn- migrations-lines
- "Return a sequnce of DDL statements that should be used to perform migrations for LIQUIBASE.
+ "Return a sequnce of DDL statements that should be used to perform migrations for `liquibase`.
- MySQL gets snippy if we try to run the entire DB migration as one single string; it seems to only like it if we run
- one statement at a time; Liquibase puts each DDL statement on its own line automatically so just split by lines and
- filter out blank / comment lines. Even though this is not neccesary for H2 or Postgres go ahead and do it anyway
- because it keeps the code simple and doesn't make a significant performance difference."
+ MySQL gets snippy if we try to run the entire DB migration as one single string; it seems to only like it if we run
+ one statement at a time; Liquibase puts each DDL statement on its own line automatically so just split by lines and
+ filter out blank / comment lines. Even though this is not neccesary for H2 or Postgres go ahead and do it anyway
+ because it keeps the code simple and doesn't make a significant performance difference."
[^Liquibase liquibase]
(for [line (s/split-lines (migrations-sql liquibase))
:when (not (or (s/blank? line)
@@ -132,57 +134,67 @@
line))
(defn- has-unrun-migrations?
- "Does LIQUIBASE have migration change sets that haven't been run yet?
+ "Does `liquibase` have migration change sets that haven't been run yet?
- It's a good idea to Check to make sure there's actually something to do before running `(migrate :up)` because
- `migrations-sql` will always contain SQL to create and release migration locks, which is both slightly dangerous
- and a waste of time when we won't be using them."
+ It's a good idea to Check to make sure there's actually something to do before running `(migrate :up)` because
+ `migrations-sql` will always contain SQL to create and release migration locks, which is both slightly dangerous and
+ a waste of time when we won't be using them."
^Boolean [^Liquibase liquibase]
(boolean (seq (.listUnrunChangeSets liquibase nil))))
-(defn- has-migration-lock?
+(defn- migration-lock-exists?
"Is a migration lock in place for LIQUIBASE?"
^Boolean [^Liquibase liquibase]
(boolean (seq (.listLocks liquibase))))
(defn- wait-for-migration-lock-to-be-cleared
- "Check and make sure the database isn't locked. If it is, sleep for 2 seconds and then retry several times.
- There's a chance the lock will end up clearing up so we can run migrations normally."
+ "Check and make sure the database isn't locked. If it is, sleep for 2 seconds and then retry several times. There's a
+ chance the lock will end up clearing up so we can run migrations normally."
[^Liquibase liquibase]
(u/auto-retry 5
- (when (has-migration-lock? liquibase)
+ (when (migration-lock-exists? liquibase)
(Thread/sleep 2000)
- (throw (Exception. (str "Database has migration lock; cannot run migrations. You can force-release these locks "
- "by running `java -jar metabase.jar migrate release-locks`."))))))
+ (throw
+ (Exception.
+ (str
+ (trs "Database has migration lock; cannot run migrations.")
+ " "
+ (trs "You can force-release these locks by running `java -jar metabase.jar migrate release-locks`.")))))))
(defn- migrate-up-if-needed!
- "Run any unrun LIQUIBASE migrations, if needed.
+ "Run any unrun `liquibase` migrations, if needed.
- This creates SQL for the migrations to be performed, then executes each DDL statement.
- Running `.update` directly doesn't seem to work as we'd expect; it ends up commiting the changes made and they
- can't be rolled back at the end of the transaction block. Converting the migration to SQL string and running that
- via `jdbc/execute!` seems to do the trick."
+ This creates SQL for the migrations to be performed, then executes each DDL statement. Running `.update` directly
+ doesn't seem to work as we'd expect; it ends up commiting the changes made and they can't be rolled back at the end
+ of the transaction block. Converting the migration to SQL string and running that via `jdbc/execute!` seems to do
+ the trick."
[conn, ^Liquibase liquibase]
- (log/info "Checking if Database has unrun migrations...")
+ (log/info (trs "Checking if Database has unrun migrations..."))
(when (has-unrun-migrations? liquibase)
- (log/info "Database has unrun migrations. Waiting for migration lock to be cleared...")
+ (log/info (trs "Database has unrun migrations. Waiting for migration lock to be cleared..."))
(wait-for-migration-lock-to-be-cleared liquibase)
- (log/info "Migration lock is cleared. Running migrations...")
- (doseq [line (migrations-lines liquibase)]
- (jdbc/execute! conn [line]))))
+ ;; while we were waiting for the lock, it was possible that another instance finished the migration(s), so make
+ ;; sure something still needs to be done...
+ (if (has-unrun-migrations? liquibase)
+ (do
+ (log/info (trs "Migration lock is cleared. Running migrations..."))
+ (doseq [line (migrations-lines liquibase)]
+ (jdbc/execute! conn [line])))
+ (log/info
+ (trs "Migration lock cleared, but nothing to do here! Migrations were finished by another instance.")))))
(defn- force-migrate-up-if-needed!
"Force migrating up. This does two things differently from `migrate-up-if-needed!`:
- 1. This doesn't check to make sure the DB locks are cleared
- 2. Any DDL statements that fail are ignored
+ 1. This doesn't check to make sure the DB locks are cleared
+ 2. Any DDL statements that fail are ignored
- It can be used to fix situations where the database got into a weird state, as was common before the fixes made in
- #3295.
+ It can be used to fix situations where the database got into a weird state, as was common before the fixes made in
+ #3295.
- Each DDL statement is ran inside a nested transaction; that way if the nested transaction fails we can roll it back
- without rolling back the entirety of changes that were made. (If a single statement in a transaction fails you
- can't do anything futher until you clear the error state by doing something like calling `.rollback`.)"
+ Each DDL statement is ran inside a nested transaction; that way if the nested transaction fails we can roll it back
+ without rolling back the entirety of changes that were made. (If a single statement in a transaction fails you can't
+ do anything futher until you clear the error state by doing something like calling `.rollback`.)"
[conn, ^Liquibase liquibase]
(.clearCheckSums liquibase)
(when (has-unrun-migrations? liquibase)
@@ -197,7 +209,7 @@
(def ^{:arglists '([])} ^DatabaseFactory database-factory
"Return an instance of the Liquibase `DatabaseFactory`. This is done on a background thread at launch because
- otherwise it adds 2 seconds to startup time."
+ otherwise it adds 2 seconds to startup time."
(partial deref (future (DatabaseFactory/getInstance))))
(defn- conn->liquibase
@@ -222,7 +234,8 @@
"DATABASECHANGELOG"
"databasechangelog")
fresh-install? (jdbc/with-db-metadata [meta (jdbc-details)] ;; don't migrate on fresh install
- (empty? (jdbc/metadata-query (.getTables meta nil nil liquibases-table-name (into-array String ["TABLE"])))))
+ (empty? (jdbc/metadata-query
+ (.getTables meta nil nil liquibases-table-name (into-array String ["TABLE"])))))
query (format "UPDATE %s SET FILENAME = ?" liquibases-table-name)]
(when-not fresh-install?
(jdbc/execute! conn [query "migrations/000_migrations.yaml"]))))
@@ -252,11 +265,11 @@
;; Disable auto-commit. This should already be off but set it just to be safe
(.setAutoCommit (jdbc/get-connection conn) false)
;; Set up liquibase and let it do its thing
- (log/info "Setting up Liquibase...")
+ (log/info (trs "Setting up Liquibase..."))
(try
(let [liquibase (conn->liquibase conn)]
(consolidate-liquibase-changesets conn)
- (log/info "Liquibase is ready.")
+ (log/info (trs "Liquibase is ready."))
(case direction
:up (migrate-up-if-needed! conn liquibase)
:force (force-migrate-up-if-needed! conn liquibase)
@@ -279,7 +292,7 @@
;;; +----------------------------------------------------------------------------------------------------------------+
(defn connection-pool
- "Create a C3P0 connection pool for the given database SPEC."
+ "Create a C3P0 connection pool for the given database `spec`."
[{:keys [subprotocol subname classname minimum-pool-size idle-connection-test-period excess-timeout]
:or {minimum-pool-size 3
idle-connection-test-period 0
@@ -348,12 +361,12 @@
(verify-db-connection (:type db-details) db-details))
([engine details]
{:pre [(keyword? engine) (map? details)]}
- (log/info (u/format-color 'cyan "Verifying %s Database Connection ..." (name engine)))
+ (log/info (u/format-color 'cyan (trs "Verifying {0} Database Connection ..." (name engine))))
(assert (binding [*allow-potentailly-unsafe-connections* true]
(require 'metabase.driver)
((resolve 'metabase.driver/can-connect-with-details?) engine details))
(format "Unable to connect to Metabase %s DB." (name engine)))
- (log/info "Verify Database Connection ... " (u/emoji "✅"))))
+ (log/info (trs "Verify Database Connection ... ") (u/emoji "✅"))))
(def ^:dynamic ^Boolean *disable-data-migrations*
@@ -379,11 +392,24 @@
(defn- run-schema-migrations!
"Run through our DB migration process and make sure DB is fully prepared"
[auto-migrate? db-details]
- (log/info "Running Database Migrations...")
+ (log/info (trs "Running Database Migrations..."))
(if auto-migrate?
- (migrate! db-details :up)
+ ;; There is a weird situation where running the migrations can cause a race condition: if two (or more) instances
+ ;; in a horizontal cluster are started at the exact same time, they can all start running migrations (and all
+ ;; acquire a lock) at the exact same moment. Since they all acquire a lock at the same time, none of them would
+ ;; have been blocked from starting by the lock being in place. (Yes, this not working sort of defeats the whole
+ ;; purpose of the lock in the first place, but this *is* Liquibase.)
+ ;;
+ ;; So what happens is one instance will ultimately end up commiting the transaction first (and succeed), while the
+ ;; others will fail due to duplicate tables or the like and have their transactions rolled back.
+ ;;
+ ;; However, we don't want to have that instance killed because its migrations failed for that reason, so retry a
+ ;; second time; this time, it will either run into the lock, or see that there are no migrations to run in the
+ ;; first place, and launch normally.
+ (u/auto-retry 1
+ (migrate! db-details :up))
(print-migrations-and-quit! db-details))
- (log/info "Database Migrations Current ... " (u/emoji "✅")))
+ (log/info (trs "Database Migrations Current ... ") (u/emoji "✅")))
(defn- run-data-migrations!
"Do any custom code-based migrations now that the db structure is up to date."
diff --git a/src/metabase/driver.clj b/src/metabase/driver.clj
index 6abe808962059..bdf893ba77347 100644
--- a/src/metabase/driver.clj
+++ b/src/metabase/driver.clj
@@ -392,7 +392,8 @@
[clojure.lang.IPersistentMap :type/Dictionary]
[clojure.lang.IPersistentVector :type/Array]
[org.bson.types.ObjectId :type/MongoBSONID]
- [org.postgresql.util.PGobject :type/*]])
+ [org.postgresql.util.PGobject :type/*]
+ [nil :type/*]]) ; all-NULL columns in DBs like Mongo w/o explicit types
(log/warn (trs "Don''t know how to map class ''{0}'' to a Field base_type, falling back to :type/*." klass))
:type/*))
diff --git a/src/metabase/driver/mongo.clj b/src/metabase/driver/mongo.clj
index 45f504ea3bc58..2b665dc4f591d 100644
--- a/src/metabase/driver/mongo.clj
+++ b/src/metabase/driver/mongo.clj
@@ -23,9 +23,9 @@
(defn- can-connect? [details]
(with-mongo-connection [^DB conn, details]
- (= (-> (cmd/db-stats conn)
- (conv/from-db-object :keywordize)
- :ok)
+ (= (float (-> (cmd/db-stats conn)
+ (conv/from-db-object :keywordize)
+ :ok))
1.0)))
(defn- humanize-connection-error-message [message]
@@ -116,7 +116,7 @@
(defn- describe-table-field [field-kw field-info]
(let [most-common-object-type (most-common-object-type (vec (:types field-info)))]
(cond-> {:name (name field-kw)
- :database-type (.getName most-common-object-type)
+ :database-type (some-> most-common-object-type .getName)
:base-type (driver/class->base-type most-common-object-type)}
(= :_id field-kw) (assoc :pk? true)
(:special-types field-info) (assoc :special-type (->> (vec (:special-types field-info))
diff --git a/src/metabase/events.clj b/src/metabase/events.clj
index 28e245f5b2f51..11613ae0531e4 100644
--- a/src/metabase/events.clj
+++ b/src/metabase/events.clj
@@ -55,6 +55,7 @@
(defn publish-event!
"Publish an item into the events stream. Returns the published item to allow for chaining."
+ {:style/indent 1}
[topic event-item]
{:pre [(keyword topic)]}
(async/go (async/>! events-channel {:topic (keyword topic), :item event-item}))
diff --git a/src/metabase/metabot.clj b/src/metabase/metabot.clj
index 6b034ba180fad..22ece0d117f2d 100644
--- a/src/metabase/metabot.clj
+++ b/src/metabase/metabot.clj
@@ -7,6 +7,7 @@
[string :as str]]
[clojure.java.io :as io]
[clojure.tools.logging :as log]
+ [honeysql.core :as hsql]
[manifold
[deferred :as d]
[stream :as s]]
@@ -21,8 +22,10 @@
[permissions :refer [Permissions]]
[permissions-group :as perms-group]
[setting :as setting :refer [defsetting]]]
- [metabase.util.urls :as urls]
- [puppetlabs.i18n.core :refer [tru trs]]
+ [metabase.util
+ [date :as du]
+ [urls :as urls]]
+ [puppetlabs.i18n.core :refer [trs tru]]
[throttle.core :as throttle]
[toucan.db :as db]))
@@ -32,7 +35,106 @@
:default false)
-;;; ------------------------------------------------------------ Perms Checking ------------------------------------------------------------
+;;; ------------------------------------- Deciding which instance is the MetaBot -------------------------------------
+
+;; Close your eyes, and imagine a scenario: someone is running multiple Metabase instances in a horizontal cluster.
+;; Good for them, but how do we make sure one, and only one, of those instances, replies to incoming MetaBot commands?
+;; It would certainly be too much if someone ran, say, 4 instances, and typing `metabot kanye` into Slack gave them 4
+;; Kanye West quotes, wouldn't it?
+;;
+;; Luckily, we have an "elegant" solution: we'll use the Settings framework to keep track of which instance is
+;; currently serving as the MetaBot. We'll have that instance periodically check in; if it doesn't check in for some
+;; timeout interval, we'll consider the job of MetaBot up for grabs. Each instance will periodically check if the
+;; MetaBot job is open, and, if so, whoever discovers it first will take it.
+
+
+;; How do we uniquely identify each instance?
+;;
+;; `local-process-uuid` is randomly-generated upon launch and used to identify this specific Metabase instance during
+;; this specific run. Restarting the server will change this UUID, and each server in a horizontal cluster will have
+;; its own ID, making this different from the `site-uuid` Setting. The local process UUID is used to differentiate
+;; different horizontally clustered MB instances so we can determine which of them will handle MetaBot duties.
+;;
+;; TODO - if we ever want to use this elsewhere, we need to move it to `metabase.config` or somewhere else central
+;; like that.
+(defonce ^:private local-process-uuid
+ (str (java.util.UUID/randomUUID)))
+
+(defsetting ^:private metabot-instance-uuid
+ "UUID of the active MetaBot instance (the Metabase process currently handling MetaBot duties.)"
+ ;; This should be cached because we'll be checking it fairly often, basically every 2 seconds as part of the
+ ;; websocket monitor thread to see whether we're MetaBot (the thread won't open the WebSocket unless that instance
+ ;; is handling MetaBot duties)
+ :internal? true)
+
+(defsetting ^:private metabot-instance-last-checkin
+ "Timestamp of the last time the active MetaBot instance checked in."
+ :internal? true
+ ;; caching is disabled for this, since it is intended to be updated frequently (once a minute or so). If we use the
+ ;; cache, it will trigger cache invalidation for all the other instances (wasteful), and possibly at any rate be
+ ;; incorrect (for example, if another instance checked in a minute ago, our local cache might not get updated right
+ ;; away, causing us to falsely assume the MetaBot role is up for grabs.)
+ :cache? false
+ :type :timestamp)
+
+(defn- current-timestamp-from-db
+ "Fetch the current timestamp from the DB. Why do this from the DB? It's not safe to assume multiple instances have
+ clocks exactly in sync; but since each instance is using the same application DB, we can use it as a canonical
+ source of truth."
+ ^java.sql.Timestamp []
+ (-> (db/query {:select [[(hsql/raw "current_timestamp") :current_timestamp]]})
+ first
+ :current_timestamp))
+
+(defn- update-last-checkin!
+ "Update the last checkin timestamp recorded in the DB."
+ []
+ (metabot-instance-last-checkin (current-timestamp-from-db)))
+
+(defn- seconds-since-last-checkin
+ "Return the number of seconds since the active MetaBot instance last checked in (updated the
+ `metabot-instance-last-checkin` Setting). If a MetaBot instance has *never* checked in, this returns `nil`. (Since
+ `last-checkin` is one of the few Settings that isn't cached, this always requires a DB call.)"
+ []
+ (when-let [last-checkin (metabot-instance-last-checkin)]
+ (u/prog1 (-> (- (.getTime (current-timestamp-from-db))
+ (.getTime last-checkin))
+ (/ 1000))
+ (log/debug (u/format-color 'magenta (trs "Last MetaBot checkin was {0} ago." (du/format-seconds <>)))))))
+
+(def ^:private ^Integer recent-checkin-timeout-interval-seconds
+ "Number of seconds since the last MetaBot checkin that we will consider the MetaBot job to be 'up for grabs',
+ currently 3 minutes. (i.e. if the current MetaBot job holder doesn't check in for more than 3 minutes, it's up for
+ grabs.)"
+ (int (* 60 3)))
+
+(defn- last-checkin-was-not-recent?
+ "`true` if the last checkin of the active MetaBot instance was more than 3 minutes ago, or if there has never been a
+ checkin. (This requires DB calls, so it should not be called too often -- once a minute [at the time of this
+ writing] should be sufficient.)"
+ []
+ (if-let [seconds-since-last-checkin (seconds-since-last-checkin)]
+ (> seconds-since-last-checkin
+ recent-checkin-timeout-interval-seconds)
+ true))
+
+(defn- am-i-the-metabot?
+ "Does this instance currently have the MetaBot job? (Does not require any DB calls, so may safely be called
+ often (i.e. in the websocket monitor thread loop.)"
+ []
+ (= (metabot-instance-uuid)
+ local-process-uuid))
+
+(defn- become-metabot!
+ "Direct this instance to assume the duties of acting as MetaBot, and update the Settings we use to track assignment
+ accordingly."
+ []
+ (log/info (u/format-color 'green (trs "This instance will now handle MetaBot duties.")))
+ (metabot-instance-uuid local-process-uuid)
+ (update-last-checkin!))
+
+
+;;; ------------------------------------------------- Perms Checking -------------------------------------------------
(defn- metabot-permissions
"Return the set of permissions granted to the MetaBot."
@@ -50,7 +152,7 @@
`(do-with-metabot-permissions (fn [] ~@body)))
-;;; # ------------------------------------------------------------ Metabot Command Handlers ------------------------------------------------------------
+;;; -------------------------------------------- Metabot Command Handlers --------------------------------------------
(def ^:private ^:dynamic *channel-id* nil)
@@ -110,15 +212,18 @@
(defn- card-with-name [card-name]
(first (u/prog1 (db/select [Card :id :name], :%lower.name [:like (str \% (str/lower-case card-name) \%)])
(when (> (count <>) 1)
- (throw (Exception. (str (tru "Could you be a little more specific? I found these cards with names that matched:\n{0}"
- (format-cards <>)))))))))
+ (throw (Exception.
+ (str (tru "Could you be a little more specific? I found these cards with names that matched:\n{0}"
+ (format-cards <>)))))))))
(defn- id-or-name->card [card-id-or-name]
(cond
(integer? card-id-or-name) (db/select-one [Card :id :name], :id card-id-or-name)
(or (string? card-id-or-name)
(symbol? card-id-or-name)) (card-with-name card-id-or-name)
- :else (throw (Exception. (str (tru "I don''t know what Card `{0}` is. Give me a Card ID or name." card-id-or-name))))))
+ :else (throw (Exception.
+ (str (tru "I don''t know what Card `{0}` is. Give me a Card ID or name."
+ card-id-or-name))))))
(defn ^:metabot show
@@ -130,13 +235,16 @@
(do
(with-metabot-permissions
(read-check Card card-id))
- (do-async (let [attachments (pulse/create-and-upload-slack-attachments! (pulse/create-slack-attachment-data [(pulse/execute-card card-id, :context :metabot)]))]
+ (do-async (let [attachments (pulse/create-and-upload-slack-attachments!
+ (pulse/create-slack-attachment-data
+ [(pulse/execute-card card-id, :context :metabot)]))]
(slack/post-chat-message! *channel-id*
nil
attachments)))
(tru "Ok, just a second..."))
(throw (Exception. (str (tru "Not Found"))))))
- ;; If the card name comes without spaces, e.g. (show 'my 'wacky 'card) turn it into a string an recur: (show "my wacky card")
+ ;; If the card name comes without spaces, e.g. (show 'my 'wacky 'card) turn it into a string and recur: (show "my
+ ;; wacky card")
([word & more]
(show (str/join " " (cons word more)))))
@@ -171,7 +279,7 @@
(str ":kanye:\n> " (rand-nth @kanye-quotes)))
-;;; # ------------------------------------------------------------ Metabot Command Dispatch ------------------------------------------------------------
+;;; -------------------------------------------- Metabot Command Dispatch --------------------------------------------
(def ^:private apply-metabot-fn
(dispatch-fn "understand" :metabot))
@@ -189,7 +297,7 @@
(apply apply-metabot-fn tokens)))))
-;;; # ------------------------------------------------------------ Metabot Input Handling ------------------------------------------------------------
+;;; --------------------------------------------- Metabot Input Handling ---------------------------------------------
(defn- message->command-str
"Get the command portion of a message *event* directed at Metabot.
@@ -241,7 +349,7 @@
nil)))))
-;;; # ------------------------------------------------------------ Websocket Connection Stuff ------------------------------------------------------------
+;;; ------------------------------------------- Websocket Connection Stuff -------------------------------------------
(defn- connect-websocket! []
(when-let [websocket-url (slack/websocket-url)]
@@ -264,9 +372,10 @@
;; and if it is no longer equal to theirs they should die
(defonce ^:private websocket-monitor-thread-id (atom nil))
-;; we'll use a THROTTLER to implement exponential backoff for recconenction attempts, since THROTTLERS are designed with for this sort of thing
-;; e.g. after the first failed connection we'll wait 2 seconds, then each that amount increases by the `:delay-exponent` of 1.3
-;; so our reconnection schedule will look something like:
+;; we'll use a THROTTLER to implement exponential backoff for reconnection attempts, since THROTTLERS are designed
+;; for this sort of thing, e.g. after the first failed connection we'll wait 2 seconds, then that amount
+;; increases by the `:delay-exponent` of 1.3. So our reconnection schedule will look something like:
+;;
;; number of consecutive failed attempts | seconds before next try (rounded up to nearest multiple of 2 seconds)
;; --------------------------------------+----------------------------------------------------------------------
;; 0 | 2
@@ -276,47 +385,91 @@
;; 4 | 8
;; 5 | 14
;; 6 | 30
-;; we'll throttle this based on values of the `slack-token` setting; that way if someone changes its value they won't have to wait
-;; whatever the exponential delay is before the connection is retried
+;;
+;; we'll throttle this based on values of the `slack-token` setting; that way if someone changes its value they won't
+;; have to wait whatever the exponential delay is before the connection is retried
(def ^:private reconnection-attempt-throttler
(throttle/make-throttler nil :attempts-threshold 1, :initial-delay-ms 2000, :delay-exponent 1.3))
(defn- should-attempt-to-reconnect? ^Boolean []
- (boolean (u/ignore-exceptions
- (throttle/check reconnection-attempt-throttler (slack/slack-token))
- true)))
+ (boolean
+ (u/ignore-exceptions
+ (throttle/check reconnection-attempt-throttler (slack/slack-token))
+ true)))
+
+(defn- reopen-websocket-connection-if-needed!
+ "Check to see if websocket connection is [still] open, [re-]open it if not."
+ []
+ ;; Only open the Websocket connection if this instance is the MetaBot
+ (when (am-i-the-metabot?)
+ (when (= (.getId (Thread/currentThread)) @websocket-monitor-thread-id)
+ (try
+ (when (or (not @websocket)
+ (s/closed? @websocket))
+ (log/debug (trs "MetaBot WebSocket is closed. Reconnecting now."))
+ (connect-websocket!))
+ (catch Throwable e
+ (log/error (trs "Error connecting websocket:") (.getMessage e)))))))
(defn- start-websocket-monitor! []
(future
(reset! websocket-monitor-thread-id (.getId (Thread/currentThread)))
- ;; Every 2 seconds check to see if websocket connection is [still] open, [re-]open it if not
(loop []
+ ;; Every 2 seconds...
(while (not (should-attempt-to-reconnect?))
(Thread/sleep 2000))
- (when (= (.getId (Thread/currentThread)) @websocket-monitor-thread-id)
- (try
- (when (or (not @websocket)
- (s/closed? @websocket))
- (log/debug (trs "MetaBot WebSocket is closed. Reconnecting now."))
- (connect-websocket!))
- (catch Throwable e
- (log/error (trs "Error connecting websocket:") (.getMessage e))))
- (recur)))))
+ (reopen-websocket-connection-if-needed!)
+ (recur))))
+
+
+(defn- check-and-update-instance-status!
+ "Check whether the current instance is serving as the MetaBot; if so, update the last checkin timestamp; if not, check
+ whether we should become the MetaBot (and do so if we should)."
+ []
+ (cond
+ ;; if we're already the MetaBot instance, update the last checkin timestamp
+ (am-i-the-metabot?)
+ (do
+ (log/debug (trs "This instance is performing MetaBot duties."))
+ (update-last-checkin!))
+ ;; otherwise if the last checkin was too long ago, it's time for us to assume the mantle of MetaBot
+ (last-checkin-was-not-recent?)
+ (become-metabot!)
+ ;; otherwise someone else is the MetaBot and we're done here! woo
+ :else
+ (log/debug (u/format-color 'blue (trs "Another instance is already handling MetaBot duties.")))))
+
+(defn- start-instance-monitor! []
+ (future
+ (loop []
+ (check-and-update-instance-status!)
+ (Thread/sleep (* 60 1000))
+ (recur))))
+
+(defn- seconds-to-wait-before-starting
+ "Return a random number of seconds to wait before starting MetaBot processes, between 0 and 59. This is done to
+ introduce a bit of jitter that should prevent a rush of multiple instances all racing to become the MetaBot at the
+ same time."
+ []
+ (mod (.nextInt (java.security.SecureRandom.)) 60))
(defn start-metabot!
"Start the MetaBot! :robot_face:
- This will spin up a background thread that opens and maintains a Slack WebSocket connection."
+ This will spin up a background thread that opens and maintains a Slack WebSocket connection."
[]
- (when (and (slack/slack-token)
- (metabot-enabled))
- (log/info "Starting MetaBot WebSocket monitor thread...")
- (start-websocket-monitor!)))
+ (future
+ (Thread/sleep (* 1000 (seconds-to-wait-before-starting)))
+ (when (and (slack/slack-token)
+ (metabot-enabled))
+ (log/info (trs "Starting MetaBot threads..."))
+ (start-websocket-monitor!)
+ (start-instance-monitor!))))
(defn stop-metabot!
"Stop the MetaBot! :robot_face:
- This will stop the background thread that responsible for the Slack WebSocket connection."
+ This will stop the background thread that is responsible for the Slack WebSocket connection."
[]
(log/info (trs "Stopping MetaBot... 🤖"))
(reset! websocket-monitor-thread-id nil)
diff --git a/src/metabase/models/collection.clj b/src/metabase/models/collection.clj
index d4f883758bcd3..0376b0fdcac43 100644
--- a/src/metabase/models/collection.clj
+++ b/src/metabase/models/collection.clj
@@ -296,6 +296,12 @@
[]
(filter i/can-read? (ancestors collection))))
+(s/defn parent-id :- (s/maybe su/IntGreaterThanZero)
+ "Get the immediate parent `collection` id, if set."
+ {:hydrate :parent_id}
+ [{:keys [location]} :- CollectionWithLocationOrRoot]
+ (if location (location-path->parent-id location)))
+
(s/defn children-location :- LocationPath
"Given a `collection` return a location path that should match the `:location` value of all the children of the
Collection.
@@ -1002,7 +1008,12 @@
{:batched-hydrate :personal_collection_id}
[users]
(when (seq users)
+ ;; efficiently create a map of user ID -> personal collection ID
(let [user-id->collection-id (db/select-field->id :personal_owner_id Collection
:personal_owner_id [:in (set (map u/get-id users))])]
+ ;; now for each User, try to find the corresponding ID out of that map. If it's not present (the personal
+ ;; Collection hasn't been created yet), then instead call `user->personal-collection-id`, which will create it
+ ;; as a side-effect. This will ensure this property never comes back as `nil`
(for [user users]
- (assoc user :personal_collection_id (user-id->collection-id (u/get-id user)))))))
+ (assoc user :personal_collection_id (or (user-id->collection-id (u/get-id user))
+ (user->personal-collection-id (u/get-id user))))))))
diff --git a/src/metabase/models/setting.clj b/src/metabase/models/setting.clj
index 29a595b9e26a6..e88a0d3980f11 100644
--- a/src/metabase/models/setting.clj
+++ b/src/metabase/models/setting.clj
@@ -42,7 +42,9 @@
[db :as mdb]
[events :as events]
[util :as u]]
- [metabase.util.honeysql-extensions :as hx]
+ [metabase.util
+ [date :as du]
+ [honeysql-extensions :as hx]]
[puppetlabs.i18n.core :refer [trs tru]]
[schema.core :as s]
[toucan
@@ -60,7 +62,16 @@
(def ^:private Type
- (s/enum :string :boolean :json :integer :double))
+ (s/enum :string :boolean :json :integer :double :timestamp))
+
+(def ^:private default-tag-for-type
+ "Type tag that will be included in the Setting's metadata, so that the getter function will not cause reflection
+ warnings."
+ {:string String
+ :boolean Boolean
+ :integer Long
+ :double Double
+ :timestamp java.sql.Timestamp})
(def ^:private SettingDefinition
{:name s/Keyword
@@ -69,7 +80,9 @@
:type Type ; all values are stored in DB as Strings,
:getter clojure.lang.IFn ; different getters/setters take care of parsing/unparsing
:setter clojure.lang.IFn
- :internal? s/Bool}) ; should the API never return this setting? (default: false)
+ :tag (s/maybe Class) ; type annotation, e.g. ^String, to be applied. Defaults to tag based on :type
+ :internal? s/Bool ; should the API never return this setting? (default: false)
+ :cache? s/Bool}) ; should the getter always fetch this value "fresh" from the DB? (default: false)
(defonce ^:private registered-settings
@@ -157,10 +170,13 @@
(when-let [last-known-update (core/get @cache settings-last-updated-key)]
;; compare it to the value in the DB. This is done be seeing whether a row exists
;; WHERE value >
- (db/select-one Setting
- {:where [:and
- [:= :key settings-last-updated-key]
- [:> :value last-known-update]]})))))
+ (u/prog1 (db/select-one Setting
+ {:where [:and
+ [:= :key settings-last-updated-key]
+ [:> :value last-known-update]]})
+ (when <>
+ (log/info (u/format-color 'red
+ (trs "Settings have been changed on another instance, and will be reloaded here.")))))))))
(def ^:private cache-update-check-interval-ms
"How often we should check whether the Settings cache is out of date (which requires a DB call)?"
@@ -227,15 +243,20 @@
(when (seq v)
v)))
+(def ^:private ^:dynamic *disable-cache* false)
+
(defn- db-value
"Get the value, if any, of `setting-or-name` from the DB (using / restoring the cache as needed)."
^String [setting-or-name]
- (restore-cache-if-needed!)
- (clojure.core/get @cache (setting-name setting-or-name)))
+ (if *disable-cache*
+ (db/select-one-field :value Setting :key (setting-name setting-or-name))
+ (do
+ (restore-cache-if-needed!)
+ (clojure.core/get @cache (setting-name setting-or-name)))))
(defn get-string
- "Get string value of `setting-or-name`. This is the default getter for `String` settings; valuBis fetched as follows:
+ "Get string value of `setting-or-name`. This is the default getter for `String` settings; value is fetched as follows:
1. From the database (i.e., set via the admin panel), if a value is present;
2. From corresponding env var, if any;
@@ -285,18 +306,26 @@
[setting-or-name]
(json/parse-string (get-string setting-or-name) keyword))
+(defn get-timestamp
+ "Get the string value of `setting-or-name` and parse it as an ISO-8601-formatted string, returning a Timestamp."
+ [setting-or-name]
+ (du/->Timestamp (get-string setting-or-name) :no-timezone))
+
(def ^:private default-getter-for-type
- {:string get-string
- :boolean get-boolean
- :integer get-integer
- :json get-json
- :double get-double})
+ {:string get-string
+ :boolean get-boolean
+ :integer get-integer
+ :json get-json
+ :timestamp get-timestamp
+ :double get-double})
(defn get
"Fetch the value of `setting-or-name`. What this means depends on the Setting's `:getter`; by default, this looks for
first for a corresponding env var, then checks the cache, then returns the default value of the Setting, if any."
[setting-or-name]
- ((:getter (resolve-setting setting-or-name))))
+ (let [{:keys [cache? getter]} (resolve-setting setting-or-name)]
+ (binding [*disable-cache* (not cache?)]
+ (getter))))
;;; +----------------------------------------------------------------------------------------------------------------+
@@ -353,7 +382,10 @@
(swap! cache assoc setting-name new-value)
(swap! cache dissoc setting-name))
;; Record the fact that a Setting has been updated so eventaully other instances (if applicable) find out about it
- (update-settings-last-updated!)
+ ;; (For Settings that don't use the Cache, don't update the `last-updated` value, because it will cause other
+ ;; instances to do needless reloading of the cache from the DB)
+ (when-not *disable-cache*
+ (update-settings-last-updated!))
;; Now return the `new-value`.
new-value))
@@ -389,15 +421,20 @@
(defn set-json!
"Serialize `new-value` for `setting-or-name` as a JSON string and save it."
[setting-or-name new-value]
- (set-string! setting-or-name (when new-value
- (json/generate-string new-value))))
+ (set-string! setting-or-name (some-> new-value json/generate-string)))
+
+(defn set-timestamp!
+ "Serialize `new-value` for `setting-or-name` as an ISO 8601-encoded timestamp string and save it."
+ [setting-or-name new-value]
+ (set-string! setting-or-name (some-> new-value du/date->iso-8601)))
(def ^:private default-setter-for-type
- {:string set-string!
- :boolean set-boolean!
- :integer set-integer!
- :json set-json!
- :double set-double!})
+ {:string set-string!
+ :boolean set-boolean!
+ :integer set-integer!
+ :json set-json!
+ :timestamp set-timestamp!
+ :double set-double!})
(defn set!
"Set the value of `setting-or-name`. What this means depends on the Setting's `:setter`; by default, this just updates
@@ -409,7 +446,9 @@
(mandrill-api-key \"xyz123\")"
[setting-or-name new-value]
- ((:setter (resolve-setting setting-or-name)) new-value))
+ (let [{:keys [setter cache?]} (resolve-setting setting-or-name)]
+ (binding [*disable-cache* (not cache?)]
+ (setter new-value))))
;;; +----------------------------------------------------------------------------------------------------------------+
@@ -427,7 +466,9 @@
:default default
:getter (partial (default-getter-for-type setting-type) setting-name)
:setter (partial (default-setter-for-type setting-type) setting-name)
- :internal? false}
+ :tag (default-tag-for-type setting-type)
+ :internal? false
+ :cache? true}
(dissoc setting :name :type :default)))
(s/validate SettingDefinition <>)
(swap! registered-settings assoc setting-name <>)))
@@ -440,10 +481,11 @@
(defn metadata-for-setting-fn
"Create metadata for the function automatically generated by `defsetting`."
- [{:keys [default description], setting-type :type, :as setting}]
+ [{:keys [default description tag], setting-type :type, :as setting}]
{:arglists '([] [new-value])
;; indentation below is intentional to make it clearer what shape the generated documentation is going to take.
;; Turn on auto-complete-mode in Emacs and see for yourself!
+ :tag tag
:doc (str/join "\n" [ description
""
(format "`%s` is a %s Setting. You can get its value by calling:" (setting-name setting) (name setting-type))
@@ -501,7 +543,9 @@
* `:setter` - A custom setter fn, which takes a single argument. Overrides the default implementation. (This
can in turn call functions in this namespace like `set-string!` or `set-boolean!` to invoke the
default setter behavior. Keep in mind that the custom setter may be passed `nil`, which should
- clear the values of the Setting.)"
+ clear the values of the Setting.)
+ * `:cache?` - Should this Setting be cached? (default `true`)? Be careful when disabling this, because it could
+ have a very negative performance impact."
{:style/indent 1}
[setting-symb description & {:as options}]
{:pre [(symbol? setting-symb)]}
diff --git a/src/metabase/pulse.clj b/src/metabase/pulse.clj
index 96a16402d4699..aa2d7f8d484f5 100644
--- a/src/metabase/pulse.clj
+++ b/src/metabase/pulse.clj
@@ -15,8 +15,7 @@
[metabase.util
[ui-logic :as ui]
[urls :as urls]]
- [metabase.util.urls :as urls]
- [puppetlabs.i18n.core :refer [tru]]
+ [puppetlabs.i18n.core :refer [trs tru]]
[schema.core :as s]
[toucan.db :as db])
(:import java.util.TimeZone
@@ -212,7 +211,12 @@
(defn- send-notifications! [notifications]
(doseq [notification notifications]
- (send-notification! notification)))
+ ;; do a try-catch around each notification so if one fails, we'll still send the other ones for example, an Alert
+ ;; set up to send over both Slack & email: if Slack fails, we still want to send the email (#7409)
+ (try
+ (send-notification! notification)
+ (catch Throwable e
+ (log/error e (trs "Error sending notification!"))))))
(defn- pulse->notifications [{:keys [cards channel-ids], :as pulse}]
(let [results (for [card cards
diff --git a/src/metabase/query_processor/middleware/expand_macros.clj b/src/metabase/query_processor/middleware/expand_macros.clj
index d4365fa7d2526..dd4a878c8c366 100644
--- a/src/metabase/query_processor/middleware/expand_macros.clj
+++ b/src/metabase/query_processor/middleware/expand_macros.clj
@@ -112,13 +112,14 @@
"Merge filter clauses."
([] [])
([clause] clause)
- ([base-clause additional-clauses]
- (cond
- (and (seq base-clause)
- (seq additional-clauses)) [:and base-clause additional-clauses]
- (seq base-clause) base-clause
- (seq additional-clauses) additional-clauses
- :else [])))
+ ([base-clause & additional-clauses]
+ (let [additional-clauses (filter seq additional-clauses)]
+ (cond
+ (and (seq base-clause)
+ (seq additional-clauses)) (apply vector :and base-clause additional-clauses)
+ (seq base-clause) base-clause
+ (seq additional-clauses) (apply merge-filter-clauses additional-clauses)
+ :else []))))
(defn- add-metrics-filter-clauses
"Add any FILTER-CLAUSES to the QUERY-DICT. If query has existing filter clauses, the new ones are
diff --git a/src/metabase/setup.clj b/src/metabase/setup.clj
index bdb1f1be029e9..da1c2828b1c0e 100644
--- a/src/metabase/setup.clj
+++ b/src/metabase/setup.clj
@@ -1,27 +1,33 @@
-(ns metabase.setup)
+(ns metabase.setup
+ (:require [metabase.models.setting :refer [Setting defsetting]]
+ [toucan.db :as db]))
-(defonce ^:private setup-token
- (atom nil))
+(defsetting ^:private setup-token
+ "A token used to signify that an instance has permissions to create the initial User. This is created upon the first
+ launch of Metabase, by the first instance; once used, it is cleared out, never to be used again."
+ :internal? true)
(defn token-value
"Return the value of the setup token, if any."
[]
- @setup-token)
+ (setup-token))
(defn token-match?
"Function for checking if the supplied string matches our setup token.
- Returns boolean `true` if supplied token matches `@setup-token`, `false` otherwise."
+ Returns boolean `true` if supplied token matches the setup token, `false` otherwise."
[token]
{:pre [(string? token)]}
- (= token @setup-token))
+ (= token (setup-token)))
(defn create-token!
- "Create and set a new `@setup-token`.
- Returns the newly created token."
+ "Create and set a new setup token, if one has not already been created. Returns the newly created token."
[]
- (reset! setup-token (str (java.util.UUID/randomUUID))))
+ ;; fetch the value directly from the DB; *do not* rely on cached value, in case a different instance came along and
+ ;; already created it
+ (or (db/select-one-field :value Setting :key "setup-token")
+ (setup-token (str (java.util.UUID/randomUUID)))))
(defn clear-token!
- "Clear the `@setup-token` if it exists and reset it to nil."
+ "Clear the setup token if it exists and reset it to `nil`."
[]
- (reset! setup-token nil))
+ (setup-token nil))
diff --git a/src/metabase/sync/interface.clj b/src/metabase/sync/interface.clj
index 1ee51060d5575..ade235b04f51f 100644
--- a/src/metabase/sync/interface.clj
+++ b/src/metabase/sync/interface.clj
@@ -22,7 +22,7 @@
(def TableMetadataField
"Schema for a given Field as provided in `describe-table`."
{:name su/NonBlankString
- :database-type su/NonBlankString
+ :database-type (s/maybe su/NonBlankString) ; blank if the Field is all NULL & untyped, i.e. in Mongo
:base-type su/FieldType
(s/optional-key :special-type) (s/maybe su/FieldType)
(s/optional-key :pk?) s/Bool
diff --git a/src/metabase/sync/sync_metadata/fields.clj b/src/metabase/sync/sync_metadata/fields.clj
index 1aa22346f0148..9f4fd5e12ba89 100644
--- a/src/metabase/sync/sync_metadata/fields.clj
+++ b/src/metabase/sync/sync_metadata/fields.clj
@@ -74,7 +74,7 @@
{:table_id (u/get-id table)
:name field-name
:display_name (humanization/name->human-readable-name field-name)
- :database_type database-type
+ :database_type (or database-type "NULL") ; placeholder for Fields w/ no type info (e.g. Mongo) & all NULL
:base_type base-type
:special_type (special-type field)
:parent_id parent-id}))))
diff --git a/src/metabase/util/date.clj b/src/metabase/util/date.clj
index 2ad043a84d29e..d74ccbe438a3d 100644
--- a/src/metabase/util/date.clj
+++ b/src/metabase/util/date.clj
@@ -24,7 +24,7 @@
:tag TimeZone}
*data-timezone*)
-(defprotocol ITimeZoneCoercible
+(defprotocol ^:private ITimeZoneCoercible
"Coerce object to `java.util.TimeZone`"
(coerce-to-timezone ^TimeZone [this]
"Coerce `this` to `java.util.TimeZone`"))
@@ -41,7 +41,8 @@
"UTC TimeZone"
(coerce-to-timezone "UTC"))
-(def ^:private jvm-timezone
+(def jvm-timezone
+ "Machine time zone"
(delay (coerce-to-timezone (System/getProperty "user.timezone"))))
(defn- warn-on-timezone-conflict
@@ -85,7 +86,7 @@
[db & body]
`(call-with-effective-timezone ~db (fn [] ~@body)))
-(defprotocol ITimestampCoercible
+(defprotocol ^:private ITimestampCoercible
"Coerce object to a `java.sql.Timestamp`."
(coerce-to-timestamp ^java.sql.Timestamp [this] [this timezone-coercible]
"Coerce this object to a `java.sql.Timestamp`. Strings are parsed as ISO-8601."))
@@ -110,7 +111,12 @@
(defn ^Timestamp ->Timestamp
"Converts `coercible-to-ts` to a `java.util.Timestamp`. Requires a `coercible-to-tz` if converting a string. Leans
- on clj-time to ensure correct conversions between the various types"
+ on clj-time to ensure correct conversions between the various types
+
+ NOTE: This function requires you to pass in a timezone or bind `*report-timezone*`, probably to make sure you're not
+ doing something dumb by forgetting it. For cases where you'd just like to parse an ISO-8601-encoded String in peace
+ without specifying a timezone, pass in `:no-timezone` as the second param to explicitly have things parsed without
+ one. (Keep in mind that if your string does not specify a timezone, it will be parsed as UTC by default.)"
([coercible-to-ts]
{:pre [(or (not (string? coercible-to-ts))
(and (string? coercible-to-ts) (bound? #'*report-timezone*)))]}
@@ -119,10 +125,11 @@
{:pre [(or (not (string? coercible-to-ts))
(and (string? coercible-to-ts) timezone))]}
(if (string? coercible-to-ts)
- (coerce-to-timestamp (str->date-time coercible-to-ts (coerce-to-timezone timezone)))
+ (coerce-to-timestamp (str->date-time coercible-to-ts (when-not (= timezone :no-timezone)
+ (coerce-to-timezone timezone))))
(coerce-to-timestamp coercible-to-ts))))
-(defprotocol IDateTimeFormatterCoercible
+(defprotocol ^:private IDateTimeFormatterCoercible
"Protocol for converting objects to `DateTimeFormatters`."
(->DateTimeFormatter ^org.joda.time.format.DateTimeFormatter [this]
"Coerce object to a `DateTimeFormatter`."))
@@ -139,15 +146,15 @@
(defn parse-date
- "Parse a datetime string S with a custom DATE-FORMAT, which can be a format string, clj-time formatter keyword, or
+ "Parse a datetime string `s` with a custom `date-format`, which can be a format string, clj-time formatter keyword, or
anything else that can be coerced to a `DateTimeFormatter`.
- (parse-date \"yyyyMMdd\" \"20160201\") -> #inst \"2016-02-01\"
- (parse-date :date-time \"2016-02-01T00:00:00.000Z\") -> #inst \"2016-02-01\""
+ (parse-date \"yyyyMMdd\" \"20160201\") -> #inst \"2016-02-01\"
+ (parse-date :date-time \"2016-02-01T00:00:00.000Z\") -> #inst \"2016-02-01\""
^java.sql.Timestamp [date-format, ^String s]
(->Timestamp (time/parse (->DateTimeFormatter date-format) s)))
-(defprotocol ISO8601
+(defprotocol ^:private ISO8601
"Protocol for converting objects to ISO8601 formatted strings."
(->iso-8601-datetime ^String [this timezone-id]
"Coerce object to an ISO8601 date-time string such as \"2015-11-18T23:55:03.841Z\" with a given TIMEZONE."))
@@ -174,12 +181,12 @@
(time/formatters :time)))))
(defn format-time
- "Returns a string representation of the time found in `T`"
+ "Returns a string representation of the time found in `t`"
[t time-zone-id]
(time/unparse (time-formatter time-zone-id) (coerce/to-date-time t)))
(defn is-time?
- "Returns true if `V` is a Time object"
+ "Returns true if `v` is a Time object"
[v]
(and v (instance? Time v)))
@@ -198,13 +205,13 @@
(->Timestamp (System/currentTimeMillis)))
(defn format-date
- "Format DATE using a given DATE-FORMAT. NOTE: This will create a date string in the JVM's timezone, not the report
+ "Format `date` using a given `date-format`. NOTE: This will create a date string in the JVM's timezone, not the report
timezone.
- DATE is anything that can coerced to a `Timestamp` via `->Timestamp`, such as a `Date`, `Timestamp`,
- `Long` (ms since the epoch), or an ISO-8601 `String`. DATE defaults to the current moment in time.
+  `date` is anything that can be coerced to a `Timestamp` via `->Timestamp`, such as a `Date`, `Timestamp`,
+ `Long` (ms since the epoch), or an ISO-8601 `String`. `date` defaults to the current moment in time.
- DATE-FORMAT is anything that can be passed to `->DateTimeFormatter`, such as `String`
+ `date-format` is anything that can be passed to `->DateTimeFormatter`, such as `String`
(using [the usual date format args](http://docs.oracle.com/javase/7/docs/api/java/text/SimpleDateFormat.html)),
`Keyword`, or `DateTimeFormatter`.
@@ -218,7 +225,7 @@
(time/unparse (->DateTimeFormatter date-format) (coerce/from-sql-time (->Timestamp date)))))
(def ^{:arglists '([] [date])} date->iso-8601
- "Format DATE a an ISO-8601 string."
+  "Format `date` as an ISO-8601 string."
(partial format-date :date-time))
(defn date-string?
@@ -231,14 +238,14 @@
(->Timestamp s utc)))))
(defn ->Date
- "Coerece DATE to a `java.util.Date`."
+  "Coerce `date` to a `java.util.Date`."
(^java.util.Date []
(java.util.Date.))
(^java.util.Date [date]
(java.util.Date. (.getTime (->Timestamp date)))))
(defn ->Calendar
- "Coerce DATE to a `java.util.Calendar`."
+ "Coerce `date` to a `java.util.Calendar`."
(^java.util.Calendar []
(doto (Calendar/getInstance)
(.setTimeZone (TimeZone/getTimeZone "UTC"))))
@@ -250,7 +257,7 @@
(.setTimeZone (TimeZone/getTimeZone timezone-id)))))
(defn relative-date
- "Return a new `Timestamp` relative to the current time using a relative date UNIT.
+ "Return a new Timestamp relative to the current time using a relative date `unit`.
(relative-date :year -1) -> #inst 2014-11-12 ..."
(^java.sql.Timestamp [unit amount]
@@ -275,7 +282,7 @@
:year})
(defn date-extract
- "Extract UNIT from DATE. DATE defaults to now.
+ "Extract `unit` from `date`. `date` defaults to now.
(date-extract :year) -> 2015"
([unit]
@@ -324,7 +331,7 @@
(format "%d-%02d-01'T'ZZ" year month)))
(defn date-trunc
- "Truncate DATE to UNIT. DATE defaults to now.
+ "Truncate `date` to `unit`. `date` defaults to now.
(date-trunc :month).
;; -> #inst \"2015-11-01T00:00:00\""
@@ -344,7 +351,7 @@
:year (trunc-with-format "yyyy-01-01'T'ZZ" date timezone-id))))
(defn date-trunc-or-extract
- "Apply date bucketing with UNIT to DATE. DATE defaults to now."
+ "Apply date bucketing with `unit` to `date`. `date` defaults to now."
([unit]
(date-trunc-or-extract unit (System/currentTimeMillis) "UTC"))
([unit date]
@@ -369,9 +376,25 @@
(recur (/ n divisor) more)
(format "%.0f %s" (double n) (name unit)))))
+(defn format-microseconds
+ "Format a time interval in microseconds into something more readable."
+ ^String [microseconds]
+ (format-nanoseconds (* 1000.0 microseconds)))
+
+(defn format-milliseconds
+ "Format a time interval in milliseconds into something more readable."
+ ^String [milliseconds]
+ (format-microseconds (* 1000.0 milliseconds)))
+
+(defn format-seconds
+ "Format a time interval in seconds into something more readable."
+ ^String [seconds]
+ (format-milliseconds (* 1000.0 seconds)))
+
+;; TODO - Not sure this belongs in the datetime util namespace
(defmacro profile
- "Like `clojure.core/time`, but lets you specify a MESSAGE that gets printed with the total time,
- and formats the time nicely using `format-nanoseconds`."
+ "Like `clojure.core/time`, but lets you specify a `message` that gets printed with the total time, and formats the
+ time nicely using `format-nanoseconds`."
{:style/indent 1}
([form]
`(profile ~(str form) ~form))
@@ -383,8 +406,8 @@
(format-nanoseconds (- (System/nanoTime) start-time#))))))))
(defn- str->date-time-with-formatters
- "Attempt to parse `DATE-STR` using `FORMATTERS`. First successful
- parse is returned, or nil"
+ "Attempt to parse `date-str` using `formatters`. First successful parse is returned, or `nil` if it cannot be
+ successfully parsed."
([formatters date-str]
(str->date-time-with-formatters formatters date-str nil))
([formatters ^String date-str ^TimeZone tz]
@@ -401,9 +424,9 @@
(->DateTimeFormatter "yyyy-MM-dd HH:mm:ss.SSS"))
(def ^:private ordered-date-parsers
- "When using clj-time.format/parse without a formatter, it tries all default formatters, but not ordered by how
- likely the date formatters will succeed. This leads to very slow parsing as many attempts fail before the right one
- is found. Using this retains that flexibility but improves performance by trying the most likely ones first"
+ "When using clj-time.format/parse without a formatter, it tries all default formatters, but not ordered by how likely
+ the date formatters will succeed. This leads to very slow parsing as many attempts fail before the right one is
+ found. Using this retains that flexibility but improves performance by trying the most likely ones first"
(let [most-likely-default-formatters [:mysql :date-hour-minute-second :date-time :date
:basic-date-time :basic-date-time-no-ms
:date-time :date-time-no-ms]]
@@ -412,7 +435,7 @@
(vals (apply dissoc time/formatters most-likely-default-formatters)))))
(defn str->date-time
- "Like clj-time.format/parse but uses an ordered list of parsers to be faster. Returns the parsed date or nil if it
+ "Like clj-time.format/parse but uses an ordered list of parsers to be faster. Returns the parsed date, or `nil` if it
was unable to be parsed."
(^org.joda.time.DateTime [^String date-str]
(str->date-time date-str nil))
@@ -425,8 +448,7 @@
[(time/formatter "HH:mmZ") (time/formatter "HH:mm:SSZ") (time/formatter "HH:mm:SS.SSSZ")])))
(defn str->time
- "Parse `TIME-STR` and return a `java.sql.Time` instance. Returns nil
- if `TIME-STR` can't be parsed."
+ "Parse `time-str` and return a `java.sql.Time` instance. Returns `nil` if `time-str` can't be parsed."
([^String date-str]
(str->time date-str nil))
([^String date-str ^TimeZone tz]
diff --git a/src/metabase/util/password.clj b/src/metabase/util/password.clj
index 31893cca15781..762f019f9abfe 100644
--- a/src/metabase/util/password.clj
+++ b/src/metabase/util/password.clj
@@ -1,4 +1,5 @@
(ns metabase.util.password
+ "Utility functions for checking passwords against hashes and for making sure passwords match complexity requirements."
(:require [cemerick.friend.credentials :as creds]
[metabase
[config :as config]
diff --git a/test/metabase/api/collection_test.clj b/test/metabase/api/collection_test.clj
index 7db7f76504667..8614ce3faef1f 100644
--- a/test/metabase/api/collection_test.clj
+++ b/test/metabase/api/collection_test.clj
@@ -180,6 +180,7 @@
:personal_owner_id (user->id :lucky)
:effective_ancestors ()
:effective_location "/"
+ :parent_id nil
:id (u/get-id (collection/user->personal-collection (user->id :lucky)))
:location "/"})
@@ -336,7 +337,8 @@
:id "root"
:can_write true
:effective_location nil
- :effective_ancestors []}
+ :effective_ancestors []
+ :parent_id nil}
(with-some-children-of-collection nil
((user->client :crowberto) :get 200 "collection/root")))
diff --git a/test/metabase/automagic_dashboards/core_test.clj b/test/metabase/automagic_dashboards/core_test.clj
index fd17104868250..9e7c3f72a9a92 100644
--- a/test/metabase/automagic_dashboards/core_test.clj
+++ b/test/metabase/automagic_dashboards/core_test.clj
@@ -1,14 +1,20 @@
(ns metabase.automagic-dashboards.core-test
- (:require [expectations :refer :all]
+ (:require [clj-time
+ [core :as t]
+ [format :as t.format]]
+ [expectations :refer :all]
[metabase.api.common :as api]
[metabase.automagic-dashboards
[core :refer :all :as magic]
[rules :as rules]]
[metabase.models
[card :refer [Card]]
+ [collection :refer [Collection]]
[database :refer [Database]]
[field :as field :refer [Field]]
[metric :refer [Metric]]
+ [permissions :as perms]
+ [permissions-group :as perms-group]
[query :as query]
[table :refer [Table] :as table]
[user :as user]]
@@ -16,6 +22,8 @@
[metabase.test.data :as data]
[metabase.test.data.users :as test-users]
[metabase.test.util :as tu]
+ [metabase.util.date :as date]
+ [puppetlabs.i18n.core :as i18n :refer [tru]]
[toucan.db :as db]
[toucan.util.test :as tt]))
@@ -82,15 +90,15 @@
(tree-seq (some-fn sequential? map?) identity)
(keep (fn [form]
(when (map? form)
- (:url form))))))
+ ((some-fn :url :more) form))))))
(defn- valid-urls?
[dashboard]
(->> dashboard
collect-urls
(every? (fn [url]
- ((test-users/user->client :rasta) :get 200 (format "automagic-dashboards/%s"
- (subs url 16)))))))
+ ((test-users/user->client :rasta) :get 200
+ (format "automagic-dashboards/%s" (subs url 16)))))))
(def ^:private valid-card?
(comp qp/expand :dataset_query))
@@ -103,12 +111,28 @@
(assert (every? valid-card? (keep :card (:ordered_cards dashboard))))
true)
+(defn- test-automagic-analysis
+ ([entity] (test-automagic-analysis entity nil))
+ ([entity cell-query]
+ ;; We want to both generate as many cards as we can to catch all aberrations, but also make sure
+ ;; that size limiting works.
+ (and (valid-dashboard? (automagic-analysis entity {:cell-query cell-query :show :all}))
+ (valid-dashboard? (automagic-analysis entity {:cell-query cell-query :show 1})))))
+
(expect
(with-rasta
(with-dashboard-cleanup
(->> (db/select Table :db_id (data/id))
- (keep #(automagic-analysis % {}))
- (every? valid-dashboard?)))))
+ (every? test-automagic-analysis)))))
+
+(expect
+ (with-rasta
+ (with-dashboard-cleanup
+ (->> (automagic-analysis (Table (data/id :venues)) {:show 1})
+ :ordered_cards
+ (filter :card)
+ count
+ (= 1)))))
(expect
(with-rasta
@@ -116,28 +140,32 @@
(->> (db/select Field
:table_id [:in (db/select-field :id Table :db_id (data/id))]
:visibility_type "normal")
- (keep #(automagic-analysis % {}))
- (every? valid-dashboard?)))))
+ (every? test-automagic-analysis)))))
(expect
(tt/with-temp* [Metric [{metric-id :id} {:table_id (data/id :venues)
:definition {:query {:aggregation ["count"]}}}]]
(with-rasta
(with-dashboard-cleanup
- (->> (Metric) (keep #(automagic-analysis % {})) (every? valid-dashboard?))))))
+ (->> (Metric) (every? test-automagic-analysis))))))
(expect
- (tt/with-temp* [Card [{card-id :id} {:table_id (data/id :venues)
+ (tt/with-temp* [Collection [{collection-id :id}]
+ Card [{card-id :id} {:table_id (data/id :venues)
+ :collection_id collection-id
:dataset_query {:query {:filter [:> [:field-id (data/id :venues :price)] 10]
:source_table (data/id :venues)}
:type :query
:database (data/id)}}]]
(with-rasta
(with-dashboard-cleanup
- (-> card-id Card (automagic-analysis {}) valid-dashboard?)))))
+ (perms/grant-collection-readwrite-permissions! (perms-group/all-users) collection-id)
+ (-> card-id Card test-automagic-analysis)))))
(expect
- (tt/with-temp* [Card [{card-id :id} {:table_id (data/id :venues)
+ (tt/with-temp* [Collection [{collection-id :id}]
+ Card [{card-id :id} {:table_id (data/id :venues)
+ :collection_id collection-id
:dataset_query {:query {:aggregation [[:count]]
:breakout [[:field-id (data/id :venues :category_id)]]
:source_table (data/id :venues)}
@@ -145,80 +173,98 @@
:database (data/id)}}]]
(with-rasta
(with-dashboard-cleanup
- (-> card-id Card (automagic-analysis {}) valid-dashboard?)))))
+ (-> card-id Card test-automagic-analysis)))))
(expect
- (tt/with-temp* [Card [{card-id :id} {:table_id nil
+ (tt/with-temp* [Collection [{collection-id :id}]
+ Card [{card-id :id} {:table_id nil
+ :collection_id collection-id
:dataset_query {:native {:query "select * from users"}
:type :native
:database (data/id)}}]]
(with-rasta
(with-dashboard-cleanup
- (-> card-id Card (automagic-analysis {}) valid-dashboard?)))))
+ (perms/grant-collection-readwrite-permissions! (perms-group/all-users) collection-id)
+ (-> card-id Card test-automagic-analysis)))))
(expect
- (tt/with-temp* [Card [{source-id :id} {:table_id (data/id :venues)
+ (tt/with-temp* [Collection [{collection-id :id}]
+ Card [{source-id :id} {:table_id (data/id :venues)
+ :collection_id collection-id
:dataset_query {:query {:source_table (data/id :venues)}
:type :query
:database (data/id)}}]
Card [{card-id :id} {:table_id (data/id :venues)
+ :collection_id collection-id
:dataset_query {:query {:filter [:> [:field-id (data/id :venues :price)] 10]
:source_table (str "card__" source-id)}
:type :query
:database -1337}}]]
(with-rasta
(with-dashboard-cleanup
- (-> card-id Card (automagic-analysis {}) valid-dashboard?)))))
+ (perms/grant-collection-readwrite-permissions! (perms-group/all-users) collection-id)
+ (-> card-id Card test-automagic-analysis)))))
(expect
- (tt/with-temp* [Card [{source-id :id} {:table_id nil
+ (tt/with-temp* [Collection [{collection-id :id}]
+ Card [{source-id :id} {:table_id nil
+ :collection_id collection-id
:dataset_query {:native {:query "select * from users"}
:type :native
:database (data/id)}}]
Card [{card-id :id} {:table_id (data/id :venues)
+ :collection_id collection-id
:dataset_query {:query {:filter [:> [:field-id (data/id :venues :price)] 10]
:source_table (str "card__" source-id)}
:type :query
:database -1337}}]]
(with-rasta
(with-dashboard-cleanup
- (-> card-id Card (automagic-analysis {}) valid-dashboard?)))))
+ (perms/grant-collection-readwrite-permissions! (perms-group/all-users) collection-id)
+ (-> card-id Card test-automagic-analysis)))))
(expect
- (tt/with-temp* [Card [{card-id :id} {:table_id nil
+ (tt/with-temp* [Collection [{collection-id :id}]
+ Card [{card-id :id} {:table_id nil
+ :collection_id collection-id
:dataset_query {:native {:query "select * from users"}
:type :native
:database (data/id)}}]]
(with-rasta
(with-dashboard-cleanup
- (-> card-id Card (automagic-analysis {}) valid-dashboard?)))))
+ (perms/grant-collection-readwrite-permissions! (perms-group/all-users) collection-id)
+ (-> card-id Card test-automagic-analysis)))))
(expect
- (tt/with-temp* [Card [{card-id :id} {:table_id (data/id :venues)
+ (tt/with-temp* [Collection [{collection-id :id}]
+ Card [{card-id :id} {:table_id (data/id :venues)
+ :collection_id collection-id
:dataset_query {:query {:filter [:> [:field-id (data/id :venues :price)] 10]
:source_table (data/id :venues)}
:type :query
:database (data/id)}}]]
(with-rasta
(with-dashboard-cleanup
+ (perms/grant-collection-readwrite-permissions! (perms-group/all-users) collection-id)
(-> card-id
Card
- (automagic-analysis {:cell-query [:= [:field-id (data/id :venues :category_id)] 2]})
- valid-dashboard?)))))
+ (test-automagic-analysis [:= [:field-id (data/id :venues :category_id)] 2]))))))
(expect
- (tt/with-temp* [Card [{card-id :id} {:table_id (data/id :venues)
+ (tt/with-temp* [Collection [{collection-id :id}]
+ Card [{card-id :id} {:table_id (data/id :venues)
+ :collection_id collection-id
:dataset_query {:query {:filter [:> [:field-id (data/id :venues :price)] 10]
:source_table (data/id :venues)}
:type :query
:database (data/id)}}]]
(with-rasta
(with-dashboard-cleanup
+ (perms/grant-collection-readwrite-permissions! (perms-group/all-users) collection-id)
(-> card-id
Card
- (automagic-analysis {:cell-query [:!= [:field-id (data/id :venues :category_id)] 2]})
- valid-dashboard?)))))
+          (test-automagic-analysis [:!= [:field-id (data/id :venues :category_id)] 2]))))))
(expect
@@ -228,7 +274,7 @@
:source_table (data/id :venues)}
:type :query
:database (data/id)})]
- (-> q (automagic-analysis {}) valid-dashboard?)))))
+ (test-automagic-analysis q)))))
(expect
(with-rasta
@@ -238,7 +284,7 @@
:source_table (data/id :venues)}
:type :query
:database (data/id)})]
- (-> q (automagic-analysis {}) valid-dashboard?)))))
+ (test-automagic-analysis q)))))
(expect
(with-rasta
@@ -248,7 +294,7 @@
:source_table (data/id :checkins)}
:type :query
:database (data/id)})]
- (-> q (automagic-analysis {}) valid-dashboard?)))))
+ (test-automagic-analysis q)))))
(expect
(with-rasta
@@ -257,9 +303,7 @@
:source_table (data/id :venues)}
:type :query
:database (data/id)})]
- (-> q
- (automagic-analysis {:cell-query [:= [:field-id (data/id :venues :category_id)] 2]})
- valid-dashboard?)))))
+ (test-automagic-analysis q [:= [:field-id (data/id :venues :category_id)] 2])))))
;;; ------------------- /candidates -------------------
@@ -408,3 +452,60 @@
(#'magic/optimal-datetime-resolution
{:fingerprint {:type {:type/DateTime {:earliest "2017-01-01T00:00:00"
:latest "2017-01-01T00:02:00"}}}}))
+
+
+;;; ------------------- Datetime humanization (for chart and dashboard titles) -------------------
+
+(let [tz (-> date/jvm-timezone deref ^TimeZone .getID)
+ dt (t/from-time-zone (t/date-time 1990 9 9 12 30)
+ (t/time-zone-for-id tz))
+ unparse-with-formatter (fn [formatter dt]
+ (t.format/unparse
+ (t.format/formatter formatter (t/time-zone-for-id tz)) dt))]
+ (expect
+ [(tru "at {0}" (unparse-with-formatter "h:mm a, MMMM d, YYYY" dt))
+ (tru "at {0}" (unparse-with-formatter "h a, MMMM d, YYYY" dt))
+ (tru "on {0}" (unparse-with-formatter "MMMM d, YYYY" dt))
+ (tru "in {0} week - {1}"
+ (#'magic/pluralize (date/date-extract :week-of-year dt tz))
+ (str (date/date-extract :year dt tz)))
+ (tru "in {0}" (unparse-with-formatter "MMMM YYYY" dt))
+ (tru "in Q{0} - {1}"
+ (date/date-extract :quarter-of-year dt tz)
+ (str (date/date-extract :year dt tz)))
+ (unparse-with-formatter "YYYY" dt)
+ (unparse-with-formatter "EEEE" dt)
+ (tru "at {0}" (unparse-with-formatter "h a" dt))
+ (unparse-with-formatter "MMMM" dt)
+ (tru "Q{0}" (date/date-extract :quarter-of-year dt tz))
+ (date/date-extract :minute-of-hour dt tz)
+ (date/date-extract :day-of-month dt tz)
+ (date/date-extract :week-of-year dt tz)]
+ (let [dt (t.format/unparse (t.format/formatters :date-hour-minute-second) dt)]
+ [(#'magic/humanize-datetime dt :minute)
+ (#'magic/humanize-datetime dt :hour)
+ (#'magic/humanize-datetime dt :day)
+ (#'magic/humanize-datetime dt :week)
+ (#'magic/humanize-datetime dt :month)
+ (#'magic/humanize-datetime dt :quarter)
+ (#'magic/humanize-datetime dt :year)
+ (#'magic/humanize-datetime dt :day-of-week)
+ (#'magic/humanize-datetime dt :hour-of-day)
+ (#'magic/humanize-datetime dt :month-of-year)
+ (#'magic/humanize-datetime dt :quarter-of-year)
+ (#'magic/humanize-datetime dt :minute-of-hour)
+ (#'magic/humanize-datetime dt :day-of-month)
+ (#'magic/humanize-datetime dt :week-of-year)])))
+
+(expect
+ [(tru "{0}st" 1)
+ (tru "{0}nd" 22)
+ (tru "{0}rd" 303)
+ (tru "{0}th" 0)
+ (tru "{0}th" 8)]
+ (map #'magic/pluralize [1 22 303 0 8]))
+
+;; Make sure we have handlers for all the units available
+(expect
+ (every? (partial #'magic/humanize-datetime "1990-09-09T12:30:00")
+ (concat (var-get #'date/date-extract-units) (var-get #'date/date-trunc-units))))
diff --git a/test/metabase/automagic_dashboards/filters_test.clj b/test/metabase/automagic_dashboards/filters_test.clj
index 28f3e6584d083..0c64afef3910c 100644
--- a/test/metabase/automagic_dashboards/filters_test.clj
+++ b/test/metabase/automagic_dashboards/filters_test.clj
@@ -12,7 +12,7 @@
;; If there's no overlap between filter clauses, just merge using `:and`.
(expect
- [:and [:and [:and [:= [:field-id 3] 42] [:= [:fk-> 1 9] "foo"]] [:> [:field-id 2] 10]] [:< [:field-id 2] 100]]
+ [:and [:= [:field-id 3] 42] [:= [:fk-> 1 9] "foo"] [:> [:field-id 2] 10] [:< [:field-id 2] 100]]
(inject-refinement [:and [:= [:fk-> 1 9] "foo"]
[:and [:> [:field-id 2] 10]
[:< [:field-id 2] 100]]]
diff --git a/test/metabase/driver/mongo_test.clj b/test/metabase/driver/mongo_test.clj
index 4a346543db6a2..7c4b6fb1cd648 100644
--- a/test/metabase/driver/mongo_test.clj
+++ b/test/metabase/driver/mongo_test.clj
@@ -122,6 +122,24 @@
:pk? true}}}
(driver/describe-table (MongoDriver.) (data/db) (Table (data/id :venues))))
+;; Make sure that all-NULL columns are synced correctly (#6875)
+(i/def-database-definition ^:private all-null-columns
+ [["bird_species"
+ [{:field-name "name", :base-type :type/Text}
+ {:field-name "favorite_snack", :base-type :type/Text}]
+ [["House Finch" nil]
+ ["Mourning Dove" nil]]]])
+
+(datasets/expect-with-engine :mongo
+ [{:name "_id", :database_type "java.lang.Long", :base_type :type/Integer, :special_type :type/PK}
+ {:name "favorite_snack", :database_type "NULL", :base_type :type/*, :special_type nil}
+ {:name "name", :database_type "java.lang.String", :base_type :type/Text, :special_type :type/Name}]
+ (data/dataset metabase.driver.mongo-test/all-null-columns
+ (map (partial into {})
+ (db/select [Field :name :database_type :base_type :special_type]
+ :table_id (data/id :bird_species)
+ {:order-by [:name]}))))
+
;;; table-rows-sample
(datasets/expect-with-engine :mongo
diff --git a/test/metabase/metabot_test.clj b/test/metabase/metabot_test.clj
new file mode 100644
index 0000000000000..be6bee6467c2e
--- /dev/null
+++ b/test/metabase/metabot_test.clj
@@ -0,0 +1,37 @@
+(ns metabase.metabot-test
+ (:require [expectations :refer :all]
+ [metabase.metabot :as metabot]
+ [metabase.util.date :as du]))
+
+;; test that if we're not the MetaBot based on Settings, our function to check is working correctly
+(expect
+ false
+ (do
+ (#'metabot/metabot-instance-uuid nil)
+ (#'metabot/metabot-instance-last-checkin nil)
+ (#'metabot/am-i-the-metabot?)))
+
+;; test that if nobody is currently the MetaBot, we will become the MetaBot
+(expect
+ (do
+ (#'metabot/metabot-instance-uuid nil)
+ (#'metabot/metabot-instance-last-checkin nil)
+ (#'metabot/check-and-update-instance-status!)
+ (#'metabot/am-i-the-metabot?)))
+
+;; test that if nobody has checked in as MetaBot for a while, we will become the MetaBot
+(expect
+ (do
+ (#'metabot/metabot-instance-uuid (str (java.util.UUID/randomUUID)))
+ (#'metabot/metabot-instance-last-checkin (du/relative-date :minute -10 (#'metabot/current-timestamp-from-db)))
+ (#'metabot/check-and-update-instance-status!)
+ (#'metabot/am-i-the-metabot?)))
+
+;; check that if another instance has checked in recently, we will *not* become the MetaBot
+(expect
+ false
+ (do
+ (#'metabot/metabot-instance-uuid (str (java.util.UUID/randomUUID)))
+ (#'metabot/metabot-instance-last-checkin (#'metabot/current-timestamp-from-db))
+ (#'metabot/check-and-update-instance-status!)
+ (#'metabot/am-i-the-metabot?)))
diff --git a/test/metabase/models/collection_test.clj b/test/metabase/models/collection_test.clj
index 766410e9d8239..159aad11adb1b 100644
--- a/test/metabase/models/collection_test.clj
+++ b/test/metabase/models/collection_test.clj
@@ -7,14 +7,16 @@
[card :refer [Card]]
[collection :as collection :refer [Collection]]
[dashboard :refer [Dashboard]]
- [permissions :refer [Permissions] :as perms]
+ [permissions :as perms :refer [Permissions]]
[permissions-group :as group :refer [PermissionsGroup]]
[pulse :refer [Pulse]]
[user :refer [User]]]
[metabase.test.data.users :as test-users]
[metabase.test.util :as tu]
[metabase.util :as u]
- [toucan.db :as db]
+ [toucan
+ [db :as db]
+ [hydrate :refer [hydrate]]]
[toucan.util.test :as tt]))
(defn force-create-personal-collections!
@@ -1460,6 +1462,13 @@
(let [personal-collection (collection/user->personal-collection my-cool-user)]
(db/update! Collection (u/get-id personal-collection) :personal_owner_id (test-users/user->id :crowberto)))))
+;; Does hydrating `:personal_collection_id` force creation of Personal Collections?
+(expect
+ (tt/with-temp User [temp-user]
+ (-> (hydrate temp-user :personal_collection_id)
+ :personal_collection_id
+ integer?)))
+
;;; +----------------------------------------------------------------------------------------------------------------+
;;; | Moving Collections "Across the Boundary" |
diff --git a/test/metabase/models/dashboard_test.clj b/test/metabase/models/dashboard_test.clj
index f3548eb6f6c6c..1c7338e6491f8 100644
--- a/test/metabase/models/dashboard_test.clj
+++ b/test/metabase/models/dashboard_test.clj
@@ -229,14 +229,15 @@
;; test that we save a transient dashboard
(expect
- 8
(tu/with-model-cleanup ['Card 'Dashboard 'DashboardCard 'Collection]
(binding [api/*current-user-id* (users/user->id :rasta)
api/*current-user-permissions-set* (-> :rasta
users/user->id
user/permissions-set
atom)]
- (->> (magic/automagic-analysis (Table (id :venues)) {})
- save-transient-dashboard!
- :id
- (db/count 'DashboardCard :dashboard_id)))))
+ (let [dashboard (magic/automagic-analysis (Table (id :venues)) {})]
+ (->> dashboard
+ save-transient-dashboard!
+ :id
+ (db/count 'DashboardCard :dashboard_id)
+ (= (-> dashboard :ordered_cards count)))))))
diff --git a/test/metabase/models/setting_test.clj b/test/metabase/models/setting_test.clj
index c7c55ef06ac6b..6474e20c67236 100644
--- a/test/metabase/models/setting_test.clj
+++ b/test/metabase/models/setting_test.clj
@@ -49,6 +49,10 @@
(test-setting-2 setting-2-value))
+(expect
+ String
+ (:tag (meta #'test-setting-1)))
+
;; ## GETTERS
;; Test defsetting getter fn. Should return the value from env var MB_TEST_SETTING_1
(expect "ABCDEFG"
@@ -219,6 +223,10 @@
;;; ------------------------------------------------ BOOLEAN SETTINGS ------------------------------------------------
+(expect
+ Boolean
+ (:tag (meta #'test-boolean-setting)))
+
(expect
{:value nil, :is_env_setting false, :env_name "MB_TEST_BOOLEAN_SETTING", :default nil}
(user-facing-info-with-db-and-env-var-values :test-boolean-setting nil nil))
@@ -293,6 +301,10 @@
;; ok, make sure the setting was set
(toucan-name)))
+(expect
+ String
+ (:tag (meta #'toucan-name)))
+
;;; ----------------------------------------------- Encrypted Settings -----------------------------------------------
@@ -323,6 +335,23 @@
(actual-value-in-db :toucan-name)))
+;;; ----------------------------------------------- TIMESTAMP SETTINGS -----------------------------------------------
+
+(defsetting ^:private test-timestamp-setting
+ "Test timestamp setting"
+ :type :timestamp)
+
+(expect
+ java.sql.Timestamp
+ (:tag (meta #'test-timestamp-setting)))
+
+;; make sure we can set & fetch the value and that it gets serialized/deserialized correctly
+(expect
+ #inst "2018-07-11T09:32:00.000Z"
+ (do (test-timestamp-setting #inst "2018-07-11T09:32:00.000Z")
+ (test-timestamp-setting)))
+
+
;;; --------------------------------------------- Cache Synchronization ----------------------------------------------
(def ^:private settings-last-updated-key @(resolve 'metabase.models.setting/settings-last-updated-key))
@@ -444,3 +473,34 @@
;; detect a cache out-of-date situation and flush the cache as appropriate, giving us the updated value when we
;; call! :wow:
(toucan-name)))
+
+
+;;; ----------------------------------------------- Uncached Settings ------------------------------------------------
+
+(defsetting ^:private uncached-setting
+ "A test setting that should *not* be cached."
+ :cache? false)
+
+;; make sure uncached setting still saves to the DB
+(expect
+ "ABCDEF"
+ (encryption-test/with-secret-key nil
+ (uncached-setting "ABCDEF")
+ (actual-value-in-db "uncached-setting")))
+
+;; make sure that fetching the Setting always fetches the latest value from the DB
+(expect
+ "123456"
+ (encryption-test/with-secret-key nil
+ (uncached-setting "ABCDEF")
+ (db/update-where! Setting {:key "uncached-setting"}
+ :value "123456")
+ (uncached-setting)))
+
+;; make sure that updating the setting doesn't update the last-updated timestamp in the cache
+(expect
+ nil
+ (encryption-test/with-secret-key nil
+ (clear-settings-last-updated-value-in-db!)
+ (uncached-setting "abcdef")
+ (settings-last-updated-value-in-db)))
diff --git a/test/metabase/test/util.clj b/test/metabase/test/util.clj
index 006e5cf54a798..304763012af27 100644
--- a/test/metabase/test/util.clj
+++ b/test/metabase/test/util.clj
@@ -37,7 +37,8 @@
[dataset-definitions :as defs]]
[toucan.db :as db]
[toucan.util.test :as test])
- (:import java.util.TimeZone
+ (:import com.mchange.v2.c3p0.PooledDataSource
+ java.util.TimeZone
org.joda.time.DateTimeZone
[org.quartz CronTrigger JobDetail JobKey Scheduler Trigger]))
@@ -451,7 +452,7 @@
timezone. That causes problems for tests that we can determine the database's timezone. This function will reset the
connections in the connection pool for `db` to ensure that we get fresh session with no timezone specified"
[db]
- (when-let [conn-pool (:datasource (sql/db->pooled-connection-spec db))]
+ (when-let [^PooledDataSource conn-pool (:datasource (sql/db->pooled-connection-spec db))]
(.softResetAllUsers conn-pool)))
(defn db-timezone-id