diff --git a/src/actions/org.js b/src/actions/org.js
index ebb1b7ee2..eaee6fa7b 100644
--- a/src/actions/org.js
+++ b/src/actions/org.js
@@ -110,119 +110,123 @@ export const sync = (options) => (dispatch, getState) => {
// wrapping function `syncDebounced`. This will actually debounce
// `doSync`, because the inner function `sync` will be created only
// once.
-const doSync = ({
- forceAction = null,
- successMessage = 'Changes pushed',
- shouldSuppressMessages = false,
- path,
-} = {}) => (dispatch, getState) => {
- const client = getState().syncBackend.get('client');
- const currentPath = getState().org.present.get('path');
- path = path || currentPath;
- if (!path || path.startsWith(STATIC_FILE_PREFIX)) {
- return;
- }
+const doSync =
+ ({
+ forceAction = null,
+ successMessage = 'Changes pushed',
+ shouldSuppressMessages = false,
+ path,
+ } = {}) =>
+ (dispatch, getState) => {
+ const client = getState().syncBackend.get('client');
+ const currentPath = getState().org.present.get('path');
+ path = path || currentPath;
+ if (!path || path.startsWith(STATIC_FILE_PREFIX)) {
+ return;
+ }
- // Calls do `doSync` are already debounced using a timer, but on big
- // Org files or slow connections, it's still possible to have
- // concurrent requests to `doSync` which has no merit. When
- // `isLoading`, don't trigger another sync in parallel. Instead,
- // call `syncDebounced` and return immediately. This will
- // recursively enqueue the request to do a sync until the current
- // sync is finished. Since it's a debounced call, enqueueing it
- // recursively is efficient.
- // That is, unless the user manually hits the 'sync' button
- // (indicated by `forceAction === 'manual'`). Then, do what the user
- // requests.
- if (getState().base.get('isLoading').includes(path) && forceAction !== 'manual') {
- // Since there is a quick succession of debounced requests to
- // synchronize, the user likely is in a undo/redo workflow with
- // potential new changes to the Org file in between. In such a
- // situation, it is easy for the remote file to have a newer
- // `lastModifiedAt` date than the `lastSyncAt` date. Hence,
- // pushing is the right action - no need for the modal to ask the
- // user for her request to pull/push or cancel.
- dispatch(sync({ forceAction: 'push' }));
- return;
- }
+ // Calls to `doSync` are already debounced using a timer, but on big
+ // Org files or slow connections, it's still possible to have
+ // concurrent requests to `doSync`, which serves no purpose. When
+ // `isLoading`, don't trigger another sync in parallel. Instead,
+ // call `syncDebounced` and return immediately. This will
+ // recursively enqueue the request to do a sync until the current
+ // sync is finished. Since it's a debounced call, enqueueing it
+ // recursively is efficient.
+ // That is, unless the user manually hits the 'sync' button
+ // (indicated by `forceAction === 'manual'`). Then, do what the user
+ // requests.
+ if (getState().base.get('isLoading').includes(path) && forceAction !== 'manual') {
+ // Since there is a quick succession of debounced requests to
+ // synchronize, the user likely is in an undo/redo workflow with
+ // potential new changes to the Org file in between. In such a
+ // situation, it is easy for the remote file to have a newer
+ // `lastModifiedAt` date than the `lastSyncAt` date. Hence,
+ // pushing is the right action; there is no need for the modal
+ // asking the user whether to pull, push, or cancel.
+ dispatch(sync({ forceAction: 'push' }));
+ return;
+ }
- if (!shouldSuppressMessages) {
- dispatch(setLoadingMessage(`Syncing ...`));
- }
- dispatch(setIsLoading(true, path));
- dispatch(setOrgFileErrorMessage(null));
-
- client
- .getFileContentsAndMetadata(path)
- .then(({ contents, lastModifiedAt }) => {
- const isDirty = getState().org.present.getIn(['files', path, 'isDirty']);
- const lastServerModifiedAt = parseISO(lastModifiedAt);
- const lastSyncAt = getState().org.present.getIn(['files', path, 'lastSyncAt']);
-
- if (isAfter(lastSyncAt, lastServerModifiedAt) || forceAction === 'push') {
- if (isDirty) {
- const contents = exportOrg({
- headers: getState().org.present.getIn(['files', path, 'headers']),
- linesBeforeHeadings: getState().org.present.getIn([
- 'files',
- path,
- 'linesBeforeHeadings',
- ]),
- dontIndent: getState().base.get('shouldNotIndentOnExport'),
- });
- client
- .updateFile(path, contents)
- .then(() => {
- if (!shouldSuppressMessages) {
- dispatch(setDisappearingLoadingMessage(successMessage, 2000));
- } else {
- setTimeout(() => dispatch(hideLoadingMessage()), 2000);
- }
- dispatch(setIsLoading(false, path));
- dispatch(setDirty(false, path));
- dispatch(setLastSyncAt(addSeconds(new Date(), 5), path));
- })
- .catch((error) => {
- const err = `There was an error pushing the file ${path}: ${error.toString()}`;
- console.error(err);
- dispatch(setDisappearingLoadingMessage(err, 5000));
- dispatch(hideLoadingMessage());
- dispatch(setIsLoading(false, path));
- // Re-enqueue the file to be synchronized again
- dispatch(sync({ path }));
+ if (!shouldSuppressMessages) {
+ dispatch(setLoadingMessage(`Syncing ...`));
+ }
+ dispatch(setIsLoading(true, path));
+ dispatch(setOrgFileErrorMessage(null));
+
+ client
+ .getFileContentsAndMetadata(path)
+ .then(({ contents, lastModifiedAt }) => {
+ const isDirty = getState().org.present.getIn(['files', path, 'isDirty']);
+ const lastServerModifiedAt = parseISO(lastModifiedAt);
+ const lastSyncAt = getState().org.present.getIn(['files', path, 'lastSyncAt']);
+
+ if (isAfter(lastSyncAt, lastServerModifiedAt) || forceAction === 'push') {
+ if (isDirty) {
+ const contents = exportOrg({
+ headers: getState().org.present.getIn(['files', path, 'headers']),
+ linesBeforeHeadings: getState().org.present.getIn([
+ 'files',
+ path,
+ 'linesBeforeHeadings',
+ ]),
+ dontIndent: getState().base.get('shouldNotIndentOnExport'),
});
- } else {
- if (!shouldSuppressMessages) {
- dispatch(setDisappearingLoadingMessage('Nothing to sync', 2000));
+ client
+ .updateFile(path, contents)
+ .then(() => {
+ if (!shouldSuppressMessages) {
+ dispatch(setDisappearingLoadingMessage(successMessage, 2000));
+ } else {
+ setTimeout(() => dispatch(hideLoadingMessage()), 2000);
+ }
+ dispatch(setIsLoading(false, path));
+ dispatch(setDirty(false, path));
+ dispatch(setLastSyncAt(addSeconds(new Date(), 5), path));
+ })
+ .catch((error) => {
+ const err = `There was an error pushing the file ${path}: ${error.toString()}`;
+ console.error(err);
+ dispatch(setDisappearingLoadingMessage(err, 5000));
+ dispatch(hideLoadingMessage());
+ dispatch(setIsLoading(false, path));
+ // Re-enqueue the file to be synchronized again
+ dispatch(sync({ path }));
+ });
} else {
- setTimeout(() => dispatch(hideLoadingMessage()), 2000);
+ if (!shouldSuppressMessages) {
+ dispatch(setDisappearingLoadingMessage('Nothing to sync', 2000));
+ } else {
+ setTimeout(() => dispatch(hideLoadingMessage()), 2000);
+ }
+ dispatch(setIsLoading(false, path));
}
- dispatch(setIsLoading(false, path));
- }
- } else {
- if (isDirty && forceAction !== 'pull') {
- dispatch(hideLoadingMessage());
- dispatch(setIsLoading(false, path));
- dispatch(activatePopup('sync-confirmation', { lastServerModifiedAt, lastSyncAt, path }));
} else {
- dispatch(parseFile(path, contents));
- dispatch(setDirty(false, path));
- dispatch(setLastSyncAt(addSeconds(new Date(), 5), path));
- if (!shouldSuppressMessages) {
- dispatch(setDisappearingLoadingMessage(`Latest version pulled: ${path}`, 2000));
+ if (isDirty && forceAction !== 'pull') {
+ dispatch(hideLoadingMessage());
+ dispatch(setIsLoading(false, path));
+ dispatch(
+ activatePopup('sync-confirmation', { lastServerModifiedAt, lastSyncAt, path })
+ );
} else {
- setTimeout(() => dispatch(hideLoadingMessage()), 2000);
+ dispatch(parseFile(path, contents));
+ dispatch(setDirty(false, path));
+ dispatch(setLastSyncAt(addSeconds(new Date(), 5), path));
+ if (!shouldSuppressMessages) {
+ dispatch(setDisappearingLoadingMessage(`Latest version pulled: ${path}`, 2000));
+ } else {
+ setTimeout(() => dispatch(hideLoadingMessage()), 2000);
+ }
+ dispatch(setIsLoading(false, path));
}
- dispatch(setIsLoading(false, path));
}
- }
- })
- .catch(() => {
- dispatch(hideLoadingMessage());
- dispatch(setIsLoading(false, path));
- dispatch(setOrgFileErrorMessage(`File ${path} not found`));
- });
-};
+ })
+ .catch(() => {
+ dispatch(hideLoadingMessage());
+ dispatch(setIsLoading(false, path));
+ dispatch(setOrgFileErrorMessage(`File ${path} not found`));
+ });
+ };
export const openHeader = (headerId) => ({
type: 'OPEN_HEADER',
@@ -483,15 +487,23 @@ export const updateTableCellValue = (cellId, newValue) => ({
dirtying: true,
});
-export const insertCapture = (templateId, content, shouldPrepend) => (dispatch, getState) => {
- dispatch(closePopup());
+export const insertCapture =
+ (templateId, content, shouldPrepend, shouldCaptureAsNewHeader) => (dispatch, getState) => {
+ dispatch(closePopup());
- const template = getState()
- .capture.get('captureTemplates')
- .concat(sampleCaptureTemplates)
- .find((template) => template.get('id') === templateId);
- dispatch({ type: 'INSERT_CAPTURE', template, content, shouldPrepend, dirtying: true });
-};
+ const template = getState()
+ .capture.get('captureTemplates')
+ .concat(sampleCaptureTemplates)
+ .find((template) => template.get('id') === templateId);
+ dispatch({
+ type: 'INSERT_CAPTURE',
+ template,
+ content,
+ shouldPrepend,
+ shouldCaptureAsNewHeader,
+ dirtying: true,
+ });
+ };
export const clearPendingCapture = () => ({
type: 'CLEAR_PENDING_CAPTURE',
@@ -551,7 +563,14 @@ export const insertPendingCapture = () => (dispatch, getState) => {
)}${captureContent}${substitutedTemplate.substring(initialCursorIndex)}`
: `${substitutedTemplate}${captureContent}`;
- dispatch(insertCapture(template.get('id'), content, template.get('shouldPrepend')));
+ dispatch(
+ insertCapture(
+ template.get('id'),
+ content,
+ template.get('shouldPrepend'),
+ !template.has('shouldCaptureAsNewHeader') || template.get('shouldCaptureAsNewHeader')
+ )
+ );
dispatch(sync({ successMessage: 'Item captured' }));
};
diff --git a/src/components/CaptureTemplatesEditor/components/CaptureTemplate/index.js b/src/components/CaptureTemplatesEditor/components/CaptureTemplate/index.js
index 969ce4f0e..8a17f25f8 100644
--- a/src/components/CaptureTemplatesEditor/components/CaptureTemplate/index.js
+++ b/src/components/CaptureTemplatesEditor/components/CaptureTemplate/index.js
@@ -39,6 +39,13 @@ export default ({
const togglePrepend = () =>
onFieldPathUpdate(template.get('id'), ['shouldPrepend'], !template.get('shouldPrepend'));
+ const toggleCaptureAsNewHeader = () =>
+ onFieldPathUpdate(
+ template.get('id'),
+ ['shouldCaptureAsNewHeader'],
+ !template.get('shouldCaptureAsNewHeader')
+ );
+
const handleAddNewOrgFileAvailability = () => {
onAddNewTemplateOrgFileAvailability(template.get('id'));
};
@@ -259,6 +266,23 @@ export default ({
);
+ const renderCaptureAsNewHeader = (template) => (
+ <div className="capture-template__field-container">
+ <div className="capture-template__field">
+ <div>Capture as new header?</div>
+ <Switch
+ isEnabled={template.get('shouldCaptureAsNewHeader')}
+ onToggle={toggleCaptureAsNewHeader}
+ />
+ </div>
+ <div className="capture-template__help-text">
+ By default, new captured content is added as a new header. Disable this setting to append
+ the content to an existing header (the last one in the header path).
+ </div>
+ </div>
+ );
+
const renderTemplateField = (template) => (
@@ -369,6 +393,7 @@ export default ({
{renderFilePath(template)}
{renderHeaderPaths(template)}
{renderPrependField(template)}
+ {renderCaptureAsNewHeader(template)}
{renderTemplateField(template)}
{renderDeleteButton()}
diff --git a/src/components/OrgFile/components/CaptureModal/index.js b/src/components/OrgFile/components/CaptureModal/index.js
index 0b7d987c3..369ecd886 100644
--- a/src/components/OrgFile/components/CaptureModal/index.js
+++ b/src/components/OrgFile/components/CaptureModal/index.js
@@ -31,6 +31,10 @@ export default ({ template, onCapture, headers }) => {
const [textareaValue, setTextareaValue] = useState(substitutedTemplate);
const [shouldPrepend, setShouldPrepend] = useState(template.get('shouldPrepend'));
+ const [shouldCaptureAsNewHeader, setShouldCaptureAsNewHeader] = useState(
+ !template.has('shouldCaptureAsNewHeader') ||
+ template.get('shouldCaptureAsNewHeader')
+ );
/** INFO: Some versions of Mobile Safari do _not_ like it when the
focus is set without an explicit user interaction. This is the case
@@ -91,12 +95,16 @@ export default ({ template, onCapture, headers }) => {
}
}, [textarea, initialCursorIndex]);
- const handleCaptureClick = () => onCapture(template.get('id'), textareaValue, shouldPrepend);
+ const handleCaptureClick = () =>
+ onCapture(template.get('id'), textareaValue, shouldPrepend, shouldCaptureAsNewHeader);
const handleTextareaChange = (event) => setTextareaValue(event.target.value);
const handlePrependSwitchToggle = () => setShouldPrepend(!shouldPrepend);
+ const handleCaptureAsNewHeaderSwitchToggle = () =>
+ setShouldCaptureAsNewHeader(!shouldCaptureAsNewHeader);
+
return (
<>
@@ -133,11 +141,18 @@ export default ({ template, onCapture, headers }) => {
Prepend:
-
+ <span>Capture as new header:</span>
+ <Switch
+ isEnabled={shouldCaptureAsNewHeader}
+ onToggle={handleCaptureAsNewHeaderSwitchToggle}
+ />
{/* Add padding to move the above textarea above the fold.
More documentation, see getMinHeight(). */}
{isMobileSafari13 && }
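
The `useState` initializer for `shouldCaptureAsNewHeader` treats a template that has never stored the key as `true`, so templates saved before this feature keep the previous behavior. The same check, restated on a standalone Immutable.js `Map` with made-up sample data:

```js
import { Map } from 'immutable';

// A missing key defaults to true; an explicit false is respected.
const captureAsNewHeader = (template) =>
  !template.has('shouldCaptureAsNewHeader') || template.get('shouldCaptureAsNewHeader');

captureAsNewHeader(Map({ shouldPrepend: false }));            // true (key absent)
captureAsNewHeader(Map({ shouldCaptureAsNewHeader: false })); // false
```
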
diff --git a/src/components/OrgFile/components/Header/index.js b/src/components/OrgFile/components/Header/index.js
index 7bda74fc5..e238d9beb 100644
--- a/src/components/OrgFile/components/Header/index.js
+++ b/src/components/OrgFile/components/Header/index.js
@@ -344,13 +344,8 @@ ${header.get('rawDescription')}`;
headerDeadlineMap.get('year')
: '';
- const {
- dragStartX,
- currentDragX,
- isDraggingFreely,
- isPlayingRemoveAnimation,
- containerWidth,
- } = this.state;
+ const { dragStartX, currentDragX, isDraggingFreely, isPlayingRemoveAnimation, containerWidth } =
+ this.state;
const marginLeft =
!!dragStartX && !!currentDragX && isDraggingFreely
? currentDragX - dragStartX
diff --git a/src/components/OrgFile/components/TaskListModal/components/TaskListView/index.js b/src/components/OrgFile/components/TaskListModal/components/TaskListView/index.js
index 9365f7be2..d05ce2e56 100644
--- a/src/components/OrgFile/components/TaskListModal/components/TaskListView/index.js
+++ b/src/components/OrgFile/components/TaskListModal/components/TaskListView/index.js
@@ -24,12 +24,8 @@ function TaskListView(props) {
return () => props.onHeaderClick(path, headerId);
}
- const {
- dateDisplayType,
- onToggleDateDisplayType,
- headersForFiles,
- todoKeywordSetsForFiles,
- } = props;
+ const { dateDisplayType, onToggleDateDisplayType, headersForFiles, todoKeywordSetsForFiles } =
+ props;
// Populate filteredHeaders
useEffect(() => {
diff --git a/src/components/OrgFile/components/TimestampEditorModal/components/TimestampEditor/index.js b/src/components/OrgFile/components/TimestampEditorModal/components/TimestampEditor/index.js
index 01e3cafc1..07068be28 100644
--- a/src/components/OrgFile/components/TimestampEditorModal/components/TimestampEditor/index.js
+++ b/src/components/OrgFile/components/TimestampEditorModal/components/TimestampEditor/index.js
@@ -323,16 +323,8 @@ class TimestampEditor extends PureComponent {
>
);
}
- const {
- isActive,
- year,
- month,
- day,
- startHour,
- startMinute,
- endHour,
- endMinute,
- } = timestamp.toJS();
+ const { isActive, year, month, day, startHour, startMinute, endHour, endMinute } =
+ timestamp.toJS();
return (
{renderAsText(timestamp)}
diff --git a/src/components/OrgFile/index.js b/src/components/OrgFile/index.js
index 34d5cf1bc..492e2fefd 100644
--- a/src/components/OrgFile/index.js
+++ b/src/components/OrgFile/index.js
@@ -237,8 +237,8 @@ class OrgFile extends PureComponent {
}
}
- handleCapture(templateId, content, shouldPrepend) {
- this.props.org.insertCapture(templateId, content, shouldPrepend);
+ handleCapture(templateId, content, shouldPrepend, shouldCaptureAsNewHeader) {
+ this.props.org.insertCapture(templateId, content, shouldPrepend, shouldCaptureAsNewHeader);
}
handlePopupClose() {
diff --git a/src/lib/headline_filter.js b/src/lib/headline_filter.js
index de3570002..7296da68f 100644
--- a/src/lib/headline_filter.js
+++ b/src/lib/headline_filter.js
@@ -292,105 +292,102 @@ export const isMatch = (filterExpr) => {
// offsets information, computeLogicalPosition could simplify the algorithm as
// long as the filter string is parsed successfully.
-export const computeCompletions = (todoKeywords, tagNames, allProperties) => (
- filterExpr,
- filterString,
- curserPosition
-) => {
- const tagAndPropNames = [].concat(
- tagNames,
- computeAllPropertyNames(fromJS(allProperties))
- .toJS()
- .map((x) => x + ':')
- );
+export const computeCompletions =
+ (todoKeywords, tagNames, allProperties) => (filterExpr, filterString, curserPosition) => {
+ const tagAndPropNames = [].concat(
+ tagNames,
+ computeAllPropertyNames(fromJS(allProperties))
+ .toJS()
+ .map((x) => x + ':')
+ );
+
+ const logicalCursorPosition = filterExpr
+ ? computeLogicalPosition(filterExpr, filterString, curserPosition)
+ : null;
+
+ const charBeforeCursor = filterString.charAt(curserPosition - 1);
+ const charTwoBeforeCursor = curserPosition > 1 ? filterString.charAt(curserPosition - 2) : '';
- const logicalCursorPosition = filterExpr
- ? computeLogicalPosition(filterExpr, filterString, curserPosition)
- : null;
-
- const charBeforeCursor = filterString.charAt(curserPosition - 1);
- const charTwoBeforeCursor = curserPosition > 1 ? filterString.charAt(curserPosition - 2) : '';
-
- if (logicalCursorPosition === null) {
- } else if (logicalCursorPosition === SPACE_SURROUNDED) {
- return todoKeywords;
- } else if (logicalCursorPosition.type === 'case-sensitive') {
- if (charBeforeCursor.match(/[A-Z]/)) {
- const textBeforeCursor = charBeforeCursor;
- const filteredTodoKeywords = todoKeywords
- .filter((x) => x.startsWith(textBeforeCursor))
- .map((x) => x.substring(textBeforeCursor.length));
- if ([' ', '', '|', '-'].includes(charTwoBeforeCursor)) {
- return filteredTodoKeywords;
+ if (logicalCursorPosition === null) {
+ } else if (logicalCursorPosition === SPACE_SURROUNDED) {
+ return todoKeywords;
+ } else if (logicalCursorPosition.type === 'case-sensitive') {
+ if (charBeforeCursor.match(/[A-Z]/)) {
+ const textBeforeCursor = charBeforeCursor;
+ const filteredTodoKeywords = todoKeywords
+ .filter((x) => x.startsWith(textBeforeCursor))
+ .map((x) => x.substring(textBeforeCursor.length));
+ if ([' ', '', '|', '-'].includes(charTwoBeforeCursor)) {
+ return filteredTodoKeywords;
+ }
}
- }
- } else if (logicalCursorPosition.type === 'ignore-case') {
- // A text filter starting with '-' turns into an exclude filter as soon as text is appended
- if (charBeforeCursor === '-' && [' ', ''].includes(charTwoBeforeCursor)) return todoKeywords;
- return [];
- } else if (logicalCursorPosition.type === 'tag') {
- // This case will likely not occur because ':' alone cannot be parsed
- if (charBeforeCursor === ':') return tagAndPropNames;
- } else if (logicalCursorPosition.type === 'property') {
- if (charBeforeCursor === ':') {
- if (charTwoBeforeCursor === ' ' || charTwoBeforeCursor === '') return tagAndPropNames;
- else {
- // Either property name or text filter
- const indexOfOtherColon = filterString.substring(0, curserPosition - 1).lastIndexOf(':');
- const maybePropertyName = filterString.substring(indexOfOtherColon + 1, curserPosition - 1);
- const quoteStringIfPossible = (x) => {
- if (x.match(/ /)) {
- if (!x.match(/"/)) return [`"${x}"`];
- if (!x.match(/'/)) return [`'${x}'`];
- const match = x.match(/^[^ ]*/);
- return [match[0]];
+ } else if (logicalCursorPosition.type === 'ignore-case') {
+ // A text filter starting with '-' turns into an exclude filter as soon as text is appended
+ if (charBeforeCursor === '-' && [' ', ''].includes(charTwoBeforeCursor)) return todoKeywords;
+ return [];
+ } else if (logicalCursorPosition.type === 'tag') {
+ // This case will likely not occur because ':' alone cannot be parsed
+ if (charBeforeCursor === ':') return tagAndPropNames;
+ } else if (logicalCursorPosition.type === 'property') {
+ if (charBeforeCursor === ':') {
+ if (charTwoBeforeCursor === ' ' || charTwoBeforeCursor === '') return tagAndPropNames;
+ else {
+ // Either property name or text filter
+ const indexOfOtherColon = filterString.substring(0, curserPosition - 1).lastIndexOf(':');
+ const maybePropertyName = filterString.substring(
+ indexOfOtherColon + 1,
+ curserPosition - 1
+ );
+ const quoteStringIfPossible = (x) => {
+ if (x.match(/ /)) {
+ if (!x.match(/"/)) return [`"${x}"`];
+ if (!x.match(/'/)) return [`'${x}'`];
+ const match = x.match(/^[^ ]*/);
+ return [match[0]];
+ }
+ return [x];
+ };
+ if (indexOfOtherColon >= 0 && maybePropertyName.match(/^[^ ]+$/)) {
+ // No space in property name -> is property -> return values for that property
+ return computeAllPropertyValuesFor(fromJS(allProperties), maybePropertyName)
+ .flatMap(quoteStringIfPossible)
+ .toJS();
}
- return [x];
- };
- if (indexOfOtherColon >= 0 && maybePropertyName.match(/^[^ ]+$/)) {
- // No space in property name -> is property -> return values for that property
- return computeAllPropertyValuesFor(fromJS(allProperties), maybePropertyName)
- .flatMap(quoteStringIfPossible)
- .toJS();
}
}
}
- }
- // If ':' or '|' is before cursor, the filter string is likely not
- // successfully parsed and therefore cannot be handled above.
- if (charBeforeCursor === ':') {
- if ([' ', '', '-'].includes(charTwoBeforeCursor)) {
- return tagAndPropNames;
- }
- } else if (charBeforeCursor === '|') {
- const indexOfOtherColon = filterString.substring(0, curserPosition).lastIndexOf(':');
- const maybeTagName = filterString.substring(indexOfOtherColon + 1, curserPosition - 1);
- if (indexOfOtherColon > -1 && !maybeTagName.match(/ /)) {
- // No space characters between ':' and '|' -> '|' is in a tag filter
- return tagNames;
- } else {
- return todoKeywords;
+ // If ':' or '|' is before cursor, the filter string is likely not
+ // successfully parsed and therefore cannot be handled above.
+ if (charBeforeCursor === ':') {
+ if ([' ', '', '-'].includes(charTwoBeforeCursor)) {
+ return tagAndPropNames;
+ }
+ } else if (charBeforeCursor === '|') {
+ const indexOfOtherColon = filterString.substring(0, curserPosition).lastIndexOf(':');
+ const maybeTagName = filterString.substring(indexOfOtherColon + 1, curserPosition - 1);
+ if (indexOfOtherColon > -1 && !maybeTagName.match(/ /)) {
+ // No space characters between ':' and '|' -> '|' is in a tag filter
+ return tagNames;
+ } else {
+ return todoKeywords;
+ }
}
- }
- return [];
-};
+ return [];
+ };
-export const computeCompletionsForDatalist = (todoKeywords, tagNames, allProperties) => (
- filterExpr,
- filterString,
- curserPosition
-) => {
- const completions = computeCompletions(todoKeywords, tagNames, allProperties)(
- filterExpr,
- filterString,
- curserPosition
- );
- return completions.map(
- (x) => filterString.substring(0, curserPosition) + x + filterString.substring(curserPosition)
- );
-};
+export const computeCompletionsForDatalist =
+ (todoKeywords, tagNames, allProperties) => (filterExpr, filterString, curserPosition) => {
+ const completions = computeCompletions(todoKeywords, tagNames, allProperties)(
+ filterExpr,
+ filterString,
+ curserPosition
+ );
+ return completions.map(
+ (x) => filterString.substring(0, curserPosition) + x + filterString.substring(curserPosition)
+ );
+ };
const SPACE_SURROUNDED = ' ';
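
`computeCompletionsForDatalist` still just splices each suggested completion into the filter string at the cursor; the reformat does not change the logic. The visible expression, restated as a standalone helper with illustrative inputs:

```js
const spliceAtCursor = (filterString, cursorPosition, completion) =>
  filterString.substring(0, cursorPosition) + completion + filterString.substring(cursorPosition);

// A suggested suffix completes the partially typed tag filter ':assign':
spliceAtCursor(':assign', 7, 'ee'); // ':assignee'
```
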
diff --git a/src/lib/parse_org.js b/src/lib/parse_org.js
index c0ddd6a07..13d921da4 100644
--- a/src/lib/parse_org.js
+++ b/src/lib/parse_org.js
@@ -7,13 +7,16 @@ import _ from 'lodash';
// TODO: Extract all match groups of `beginningRegexp` (for example
// like `emailRegexp`), so that they can be documented and are less
// unwieldly.
-const beginningRegexp = /(\[\[([^\]]*)\]\]|\[\[([^\]]*)\]\[([^\]]*)\]\])|(\[((\d*%)|(\d*\/\d*))\])|((^|\s|[({'"])([*/~=_+])([^\s,'](.*?))\11([\s\-.,:;!?'")}]?))/;
+const beginningRegexp =
+ /(\[\[([^\]]*)\]\]|\[\[([^\]]*)\]\[([^\]]*)\]\])|(\[((\d*%)|(\d*\/\d*))\])|((^|\s|[({'"])([*/~=_+])([^\s,'](.*?))\11([\s\-.,:;!?'")}]?))/;
// Regexp taken from https://stackoverflow.com/a/3809435/999007
-const httpUrlRegexp = /(https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_\\+.~#?&//=]*))/;
+const httpUrlRegexp =
+ /(https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_\\+.~#?&//=]*))/;
// Regexp taken from https://stackoverflow.com/a/1373724/999007
-const urlRegexp = /([a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?)/;
+const urlRegexp =
+ /([a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?)/;
const internationalPhoneRegexp = /((\+|00)\d{8,30})/;
@@ -29,7 +32,8 @@ const swissPhoneRegexp2 = /(0[0-9]{9,11})/;
const wwwUrlRegexp = /(www(\.[-_a-zA-Z0-9]+){2,}(\/[-_a-zA-Z0-9]+)*)/;
-const timestampOptionalRepeaterOrDelayRegexp = /(?: (\+|\+\+|\.\+|-|--)(\d+)([hdwmy])(?:\/(\d+)([hdwmy]))?)?/;
+const timestampOptionalRepeaterOrDelayRegexp =
+ /(?: (\+|\+\+|\.\+|-|--)(\d+)([hdwmy])(?:\/(\d+)([hdwmy]))?)?/;
const timestampRegex = new RegExp(
[
/([<[])/,
@@ -690,13 +694,8 @@ export const parseDescriptionPrefixElements = (rawText) => {
};
export const _updateHeaderFromDescription = (header, rawUnstrippedDescription) => {
- const {
- planningItems,
- propertyListItems,
- logNotes,
- logBookEntries,
- strippedDescription,
- } = parseDescriptionPrefixElements(rawUnstrippedDescription);
+ const { planningItems, propertyListItems, logNotes, logBookEntries, strippedDescription } =
+ parseDescriptionPrefixElements(rawUnstrippedDescription);
const parsedDescription = parseRawText(strippedDescription);
const parsedTitle = header.getIn(['titleLine', 'title']);
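
The re-wrapped `timestampOptionalRepeaterOrDelayRegexp` is unchanged. As a quick, illustrative check of what its capture groups hold when the fragment is matched on its own (in the parser it is only ever concatenated into `timestampRegex`):

```js
const m = ' .+2d/1d'.match(timestampOptionalRepeaterOrDelayRegexp);
// m[1] === '.+'               repeater/delay marker
// m[2] === '2', m[3] === 'd'  interval value and unit
// m[4] === '1', m[5] === 'd'  optional second value/unit after '/'
```
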
diff --git a/src/lib/sample_capture_templates.js b/src/lib/sample_capture_templates.js
index a1e6e3859..74b46d5e1 100644
--- a/src/lib/sample_capture_templates.js
+++ b/src/lib/sample_capture_templates.js
@@ -12,6 +12,7 @@ export default fromJS([
letter: '',
orgFilesWhereAvailable: [],
shouldPrepend: false,
+ shouldCaptureAsNewHeader: true,
template: '* TODO %?',
isSample: true,
},
@@ -24,6 +25,7 @@ export default fromJS([
letter: '',
orgFilesWhereAvailable: [],
shouldPrepend: true,
+ shouldCaptureAsNewHeader: true,
template: '* You can insert timestamps too! %T %?',
isSample: true,
},
diff --git a/src/migrations/index.js b/src/migrations/index.js
index 0a9bbb30b..7d12b263a 100644
--- a/src/migrations/index.js
+++ b/src/migrations/index.js
@@ -1,9 +1,11 @@
import migrateAccessTokenToDropboxAccessToken from './migrate_access_token_to_dropbox_access_token';
import migrateStoreInDropboxToStoreInSyncBackend from './migrate_store_in_dropbox_to_store_in_sync_backend';
import migrateNonsenseValuesInLocalstorage from './migrate_nonsense_values_in_localstorage';
+import migrateCaptureTemplates from './migrate_capture_templates';
export default () => {
migrateAccessTokenToDropboxAccessToken();
migrateStoreInDropboxToStoreInSyncBackend();
migrateNonsenseValuesInLocalstorage();
+ migrateCaptureTemplates();
};
diff --git a/src/migrations/migrate_capture_templates.js b/src/migrations/migrate_capture_templates.js
new file mode 100644
index 000000000..ca60df7f0
--- /dev/null
+++ b/src/migrations/migrate_capture_templates.js
@@ -0,0 +1,18 @@
+import { localStorageAvailable } from '../util/settings_persister';
+
+export default () => {
+ if (!localStorageAvailable) {
+ return;
+ }
+
+ let captureTemplates = JSON.parse(localStorage.getItem('captureTemplates'));
+ captureTemplates = captureTemplates || [];
+
+ captureTemplates.forEach((t) => {
+ if (!t.hasOwnProperty('shouldCaptureAsNewHeader')) {
+ t.shouldCaptureAsNewHeader = true;
+ }
+ });
+
+ localStorage.setItem('captureTemplates', JSON.stringify(captureTemplates));
+};
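
A quick way to see what the new migration does to previously saved templates. This sketch assumes templates are persisted as a JSON array under the `captureTemplates` localStorage key, exactly as the migration above reads and writes them; the sample template is made up:

```js
localStorage.setItem(
  'captureTemplates',
  JSON.stringify([{ description: 'Todo', shouldPrepend: false }])
);

migrateCaptureTemplates();

JSON.parse(localStorage.getItem('captureTemplates'));
// → [{ description: 'Todo', shouldPrepend: false, shouldCaptureAsNewHeader: true }]
```
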
diff --git a/src/reducers/capture.js b/src/reducers/capture.js
index 4497eea34..2ce9dd7eb 100644
--- a/src/reducers/capture.js
+++ b/src/reducers/capture.js
@@ -23,6 +23,7 @@ const addNewEmptyCaptureTemplate = (state) => {
orgFilesWhereAvailable: [''],
headerPaths: [''],
shouldPrepend: false,
+ shouldCaptureAsNewHeader: true,
template: '',
})
)
diff --git a/src/reducers/org.js b/src/reducers/org.js
index c9d07f1de..ddcfbaabb 100644
--- a/src/reducers/org.js
+++ b/src/reducers/org.js
@@ -680,8 +680,10 @@ const addNewTableRow = (state) => {
}
state = state.update('headers', (headers) =>
- updateTableContainingCellId(headers, selectedTableCellId, (rowIndex) => (rows) =>
- rows.insert(rowIndex + 1, newEmptyTableRowLikeRows(rows))
+ updateTableContainingCellId(
+ headers,
+ selectedTableCellId,
+ (rowIndex) => (rows) => rows.insert(rowIndex + 1, newEmptyTableRowLikeRows(rows))
)
);
@@ -697,8 +699,10 @@ const removeTableRow = (state) => {
const containingHeader = headerThatContainsTableCellId(state.get('headers'), selectedTableCellId);
state = state.update('headers', (headers) =>
- updateTableContainingCellId(headers, selectedTableCellId, (rowIndex) => (rows) =>
- rows.delete(rowIndex)
+ updateTableContainingCellId(
+ headers,
+ selectedTableCellId,
+ (rowIndex) => (rows) => rows.delete(rowIndex)
)
);
@@ -714,10 +718,13 @@ const addNewTableColumn = (state) => {
}
state = state.update('headers', (headers) =>
- updateTableContainingCellId(headers, selectedTableCellId, (_rowIndex, colIndex) => (rows) =>
- rows.map((row) =>
- row.update('contents', (contents) => contents.insert(colIndex + 1, newEmptyTableCell()))
- )
+ updateTableContainingCellId(
+ headers,
+ selectedTableCellId,
+ (_rowIndex, colIndex) => (rows) =>
+ rows.map((row) =>
+ row.update('contents', (contents) => contents.insert(colIndex + 1, newEmptyTableCell()))
+ )
)
);
@@ -733,8 +740,11 @@ const removeTableColumn = (state) => {
const containingHeader = headerThatContainsTableCellId(state.get('headers'), selectedTableCellId);
state = state.update('headers', (headers) =>
- updateTableContainingCellId(headers, selectedTableCellId, (_rowIndex, colIndex) => (rows) =>
- rows.map((row) => row.update('contents', (contents) => contents.delete(colIndex)))
+ updateTableContainingCellId(
+ headers,
+ selectedTableCellId,
+ (_rowIndex, colIndex) => (rows) =>
+ rows.map((row) => row.update('contents', (contents) => contents.delete(colIndex)))
)
);
@@ -750,10 +760,13 @@ const moveTableRowDown = (state) => {
}
state = state.update('headers', (headers) =>
- updateTableContainingCellId(headers, selectedTableCellId, (rowIndex) => (rows) =>
- rowIndex + 1 === rows.size
- ? rows
- : rows.insert(rowIndex, rows.get(rowIndex + 1)).delete(rowIndex + 2)
+ updateTableContainingCellId(
+ headers,
+ selectedTableCellId,
+ (rowIndex) => (rows) =>
+ rowIndex + 1 === rows.size
+ ? rows
+ : rows.insert(rowIndex, rows.get(rowIndex + 1)).delete(rowIndex + 2)
)
);
@@ -767,8 +780,11 @@ const moveTableRowUp = (state) => {
}
state = state.update('headers', (headers) =>
- updateTableContainingCellId(headers, selectedTableCellId, (rowIndex) => (rows) =>
- rowIndex === 0 ? rows : rows.insert(rowIndex - 1, rows.get(rowIndex)).delete(rowIndex + 1)
+ updateTableContainingCellId(
+ headers,
+ selectedTableCellId,
+ (rowIndex) => (rows) =>
+ rowIndex === 0 ? rows : rows.insert(rowIndex - 1, rows.get(rowIndex)).delete(rowIndex + 1)
)
);
@@ -782,18 +798,21 @@ const moveTableColumnLeft = (state) => {
}
state = state.update('headers', (headers) =>
- updateTableContainingCellId(headers, selectedTableCellId, (_rowIndex, columnIndex) => (rows) =>
- columnIndex === 0
- ? rows
- : rows.map((row) =>
- row.update('contents', (contents) =>
- contents.size === 0
- ? contents
- : contents
- .insert(columnIndex - 1, contents.get(columnIndex))
- .delete(columnIndex + 1)
+ updateTableContainingCellId(
+ headers,
+ selectedTableCellId,
+ (_rowIndex, columnIndex) => (rows) =>
+ columnIndex === 0
+ ? rows
+ : rows.map((row) =>
+ row.update('contents', (contents) =>
+ contents.size === 0
+ ? contents
+ : contents
+ .insert(columnIndex - 1, contents.get(columnIndex))
+ .delete(columnIndex + 1)
+ )
)
- )
)
);
@@ -807,18 +826,21 @@ const moveTableColumnRight = (state) => {
}
state = state.update('headers', (headers) =>
- updateTableContainingCellId(headers, selectedTableCellId, (_rowIndex, columnIndex) => (rows) =>
- columnIndex + 1 >= rows.getIn([0, 'contents']).size
- ? rows
- : rows.map((row) =>
- row.update('contents', (contents) =>
- contents.size === 0
- ? contents
- : contents
- .insert(columnIndex, contents.get(columnIndex + 1))
- .delete(columnIndex + 2)
+ updateTableContainingCellId(
+ headers,
+ selectedTableCellId,
+ (_rowIndex, columnIndex) => (rows) =>
+ columnIndex + 1 >= rows.getIn([0, 'contents']).size
+ ? rows
+ : rows.map((row) =>
+ row.update('contents', (contents) =>
+ contents.size === 0
+ ? contents
+ : contents
+ .insert(columnIndex, contents.get(columnIndex + 1))
+ .delete(columnIndex + 2)
+ )
)
- )
)
);
@@ -827,12 +849,18 @@ const moveTableColumnRight = (state) => {
const updateTableCellValue = (state, action) => {
state = state.update('headers', (headers) =>
- updateTableContainingCellId(headers, action.cellId, (rowIndex, colIndex) => (rows) =>
- rows.updateIn([rowIndex, 'contents', colIndex], (cell) =>
- cell
- .set('rawContents', action.newValue)
- .set('contents', fromJS(parseMarkupAndCookies(action.newValue, { excludeCookies: true })))
- )
+ updateTableContainingCellId(
+ headers,
+ action.cellId,
+ (rowIndex, colIndex) => (rows) =>
+ rows.updateIn([rowIndex, 'contents', colIndex], (cell) =>
+ cell
+ .set('rawContents', action.newValue)
+ .set(
+ 'contents',
+ fromJS(parseMarkupAndCookies(action.newValue, { excludeCookies: true }))
+ )
+ )
)
);
@@ -841,7 +869,7 @@ const updateTableCellValue = (state, action) => {
const insertCapture = (state, action) => {
const headers = state.get('headers');
- const { template, content, shouldPrepend } = action;
+ const { template, content, shouldPrepend, shouldCaptureAsNewHeader } = action;
const { newIndex, nestingLevel, parentHeader } = insertCapturePosition(
template,
@@ -853,21 +881,53 @@ const insertCapture = (state, action) => {
return state;
}
- const newHeader = newHeaderFromText(content, state.get('todoKeywordSets')).set(
- 'nestingLevel',
- nestingLevel
- );
+ if (!shouldCaptureAsNewHeader) {
+ const headerPaths = template.get('headerPaths');
+ const header = findHeaderMatchingPaths(headers, headerPaths);
+ const headerId = header.get('id');
+ const rawDescription = header.get('rawDescription');
+ const newRawDescription = shouldPrepend
+ ? prependContent(rawDescription, content)
+ : appendContent(rawDescription, content);
+ return updateHeaderDescription(state, { headerId, newRawDescription });
+ } else {
+ const newHeader = newHeaderFromText(content, state.get('todoKeywordSets')).set(
+ 'nestingLevel',
+ nestingLevel
+ );
- state = state.update('headers', (headers) => headers.insert(newIndex, newHeader));
- if (parentHeader !== undefined) {
- // We inserted the new header under a parent rather than at the top or
- // bottom of the file.
- state = updateCookiesOfHeaderWithId(state, parentHeader.get('id'));
+ state = state.update('headers', (headers) => headers.insert(newIndex, newHeader));
+ if (parentHeader !== undefined) {
+ // We inserted the new header under a parent rather than at the top or
+ // bottom of the file.
+ state = updateCookiesOfHeaderWithId(state, parentHeader.get('id'));
+ }
}
return state;
};
+const prependContent = (existing, content) => {
+ if (!existing || existing === '') {
+ return content;
+ }
+ existing = existing.replace(/^[\s\n]*/, '');
+ return content + '\n' + existing;
+};
+
+const appendContent = (existing, content) => {
+ if (!existing || existing === '') {
+ return content;
+ }
+ existing = existing.replace(/[\s\n]*$/, '');
+ return existing + '\n' + content;
+};
+
+const findHeaderMatchingPaths = (headers, headerPaths) => {
+ const header = headerWithPath(headers, headerPaths);
+ return header !== null ? header : newHeaderFromText('', { nestingLevel: 1 });
+};
+
const insertCapturePosition = (template, headers, shouldPrepend) => {
const headerPaths = template.get('headerPaths');
if (headerPaths.size === 0) {
@@ -1421,9 +1481,11 @@ const restoreFileSettings = (state, action) => {
return applyFileSettingsFromConfig(state, action.newSettings);
};
-const reduceInFile = (state, action, path) => (func, ...args) => {
- return state.updateIn(['files', path], (file) => func(file ? file : Map(), action, ...args));
-};
+const reduceInFile =
+ (state, action, path) =>
+ (func, ...args) => {
+ return state.updateIn(['files', path], (file) => func(file ? file : Map(), action, ...args));
+ };
const reducer = (state, action) => {
const path = state.get('path');
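
`prependContent` and `appendContent` normalize the surrounding whitespace before joining, so repeated captures into the same header do not accumulate blank lines. Illustrative calls (the helpers are module-private and the strings are made up):

```js
appendContent('Existing notes.\n\n', '- captured item');
// → 'Existing notes.\n- captured item'

prependContent('\nExisting notes.', '- captured item');
// → '- captured item\nExisting notes.'

appendContent('', '- captured item');
// → '- captured item' (an empty description just becomes the captured content)
```
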
diff --git a/src/reducers/org.unit.test.js b/src/reducers/org.unit.test.js
index e92cdbd08..398052e99 100644
--- a/src/reducers/org.unit.test.js
+++ b/src/reducers/org.unit.test.js
@@ -181,6 +181,7 @@ describe('org reducer', () => {
file: '',
orgFilesWhereAvailable: [],
shouldPrepend: false,
+ shouldCaptureAsNewHeader: true,
template: '* TODO %?',
isSample: true,
};
@@ -218,13 +219,18 @@ describe('org reducer', () => {
expect(extractTitleAndNesting(headers.last())).toEqual(['A second nested header', 2]);
}
- function insertCapture(path, template, shouldPrepend) {
+ function insertCapture(path, template, shouldPrepend, shouldCaptureAsNewHeader) {
// Check initially parsed file looks as expected
let headers = store.getState().org.present.getIn(['files', path, 'headers']);
expect(headers.size).toEqual(4);
expectOrigFirstHeader(headers);
expectOrigLastHeader(headers);
- const action = types.insertCapture(template.id, content, shouldPrepend);
+ const action = types.insertCapture(
+ template.id,
+ content,
+ shouldPrepend,
+ shouldCaptureAsNewHeader
+ );
store.dispatch(action);
const newHeaders = store.getState().org.present.getIn(['files', path, 'headers']);
expect(newHeaders.size).toEqual(5);