Skip to content

Commit

Permalink
Fix-CV2-5142: Fix Content warning cover when user is blocked (#2154)
Browse files Browse the repository at this point in the history
 - Display a clear message when content is flagged as SPAM because the user was blocked in a Tipline.
 - Implement the logic in a new component: ContentWarningMessage.
 - Add unit tests for the new component.
 - Clean up some warnings in existing unit tests.
 - Clean up eslint-disable react/sort-prop-types in the component
Reference: CV2-5142
  • Loading branch information
danielevalverde authored Oct 8, 2024
1 parent 70058cd commit 18b67cd
Show file tree
Hide file tree
Showing 8 changed files with 176 additions and 72 deletions.
25 changes: 0 additions & 25 deletions localization/react-intl/src/app/components/layout/AspectRatio.json
Original file line number Diff line number Diff line change
@@ -1,19 +1,4 @@
[
{
"id": "contentScreen.adult",
"description": "Content warning type: Adult",
"defaultMessage": "Adult"
},
{
"id": "contentScreen.medical",
"description": "Content warning type: Medical",
"defaultMessage": "Medical"
},
{
"id": "contentScreen.violence",
"description": "Content warning type: Violence",
"defaultMessage": "Violence"
},
{
"id": "contentScreen.viewContentButton",
"description": "Button to enable view of sensitive content",
Expand All @@ -28,15 +13,5 @@
"id": "contentScreen.superAdminMaskMessage",
"description": "Text to show that admin screen is on",
"defaultMessage": "Sensitive Content Masking Applied"
},
{
"id": "contentScreen.warning",
"description": "Content warning displayed over sensitive content",
"defaultMessage": "<strong>{user_name}</strong> has detected this content as <strong>{warning_category}</strong>"
},
{
"id": "contentScreen.warningByAutomationRule",
"description": "Content warning displayed over sensitive content",
"defaultMessage": "An automation rule has detected this content as sensitive"
}
]
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
[
{
"id": "contentScreen.adult",
"description": "Content warning type: Adult",
"defaultMessage": "Adult"
},
{
"id": "contentScreen.medical",
"description": "Content warning type: Medical",
"defaultMessage": "Medical"
},
{
"id": "contentScreen.spam",
"description": "Content warning type: Spam",
"defaultMessage": "Spam"
},
{
"id": "contentScreen.violence",
"description": "Content warning type: Violence",
"defaultMessage": "Violence"
},
{
"id": "contentScreen.warningByAutomationRule",
"description": "Content warning displayed over sensitive content detected by an automation rule",
"defaultMessage": "An automation rule has detected this content as sensitive"
},
{
"id": "contentScreen.warningBySmoochBot",
"description": "Content warning displayed over sensitive content flagged as SPAM by Smooch Bot",
"defaultMessage": "This content has been flagged as <strong>SPAM</strong> because the user was blocked due to sending excessive messages."
},
{
"id": "contentScreen.warning",
"description": "Content warning displayed over sensitive content detected by a specific user",
"defaultMessage": "<strong>{user_name}</strong> has detected this content as <strong>{warning_category}</strong>"
}
]
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import { shallowWithIntl } from '../../../../test/unit/helpers/intl-test';

describe('<ChooseExistingArticleButton />', () => {
it('should open slideout', () => {
const wrapper = shallowWithIntl(<ChooseExistingArticleButton teamSlug="meedan" onAdd={() => {}} />);
const wrapper = shallowWithIntl(<ChooseExistingArticleButton projectMediaDbid={1} teamSlug="meedan" onAdd={() => {}} />);
wrapper.find('ButtonMain').simulate('click');
expect(wrapper.find('Slideout')).toHaveLength(1);
});
Expand Down
1 change: 1 addition & 0 deletions src/app/components/cds/media-cards/ItemThumbnail.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ describe('<ItemThumbnail />', () => {
<ItemThumbnail
maskContent={false}
type="UploadedImage"
url="http://image-test.com"
/>);
expect(wrapper.find(MediaTypeDisplayIcon).length).toEqual(1);
});
Expand Down
57 changes: 11 additions & 46 deletions src/app/components/layout/AspectRatio.js
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
/* eslint-disable react/sort-prop-types */
import React from 'react';
import PropTypes from 'prop-types';
import { FormattedMessage, FormattedHTMLMessage, defineMessages, injectIntl, intlShape } from 'react-intl';
import { FormattedMessage, injectIntl, intlShape } from 'react-intl';
import { graphql, createFragmentContainer } from 'react-relay/compat';
import Lightbox from 'react-image-lightbox';
import 'react-image-lightbox/style.css';
import cx from 'classnames/bind';
import ContentWarningMessage from './ContentWarningMessage.js';
import ButtonMain from '../cds/buttons-checkboxes-chips/ButtonMain';
import SensitiveContentMenuButton from '../media/SensitiveContentMenuButton.js';
import FullscreenIcon from '../../icons/fullscreen.svg';
Expand All @@ -14,24 +14,6 @@ import VisibilityOffIcon from '../../icons/visibility_off.svg';
import DownloadIcon from '../../icons/download.svg';
import styles from './AspectRatio.module.css';

const messages = defineMessages({
adult: {
id: 'contentScreen.adult',
defaultMessage: 'Adult',
description: 'Content warning type: Adult',
},
medical: {
id: 'contentScreen.medical',
defaultMessage: 'Medical',
description: 'Content warning type: Medical',
},
violence: {
id: 'contentScreen.violence',
defaultMessage: 'Violence',
description: 'Content warning type: Violence',
},
});

const AspectRatio = ({
children,
currentUserRole,
Expand Down Expand Up @@ -105,7 +87,7 @@ const AspectRatio = ({
sortable = sortable.concat([...Object.entries(projectMedia.dynamic_annotation_flag.data.custom)]);
}
const filteredFlags = {};
['adult', 'medical', 'violence'].forEach((key) => { filteredFlags[key] = projectMedia.dynamic_annotation_flag.data.flags[key]; });
['adult', 'medical', 'spam', 'violence'].forEach((key) => { filteredFlags[key] = projectMedia.dynamic_annotation_flag.data.flags[key]; });
sortable = sortable.concat([...Object.entries(filteredFlags)]);
sortable.sort((a, b) => b[1] - a[1]);
const type = sortable[0];
Expand Down Expand Up @@ -164,28 +146,11 @@ const AspectRatio = ({
/>
) : null }
<div style={{ visibility: contentWarning && maskContent && !superAdminMask ? 'visible' : 'hidden' }}>
{ warningCreator !== 'Alegre' ? (
<FormattedHTMLMessage
defaultMessage="<strong>{user_name}</strong> has detected this content as <strong>{warning_category}</strong>"
description="Content warning displayed over sensitive content"
id="contentScreen.warning"
tagName="p"
values={{
user_name: warningCreator,
warning_category: (
(messages[warningCategory] && intl.formatMessage(messages[warningCategory])) ||
warningCategory
),
}}
/>
) : (
<FormattedHTMLMessage
defaultMessage="An automation rule has detected this content as sensitive"
description="Content warning displayed over sensitive content"
id="contentScreen.warningByAutomationRule"
tagName="p"
/>
)}
<ContentWarningMessage
intl={intl}
warningCategory={warningCategory}
warningCreator={warningCreator}
/>
</div>
{ contentWarning && !superAdminMask ? <ToggleShowHideButton /> : null }
</div>
Expand Down Expand Up @@ -221,18 +186,18 @@ const AspectRatio = ({

AspectRatio.propTypes = {
children: PropTypes.node.isRequired,
currentUserRole: PropTypes.string.isRequired,
downloadUrl: PropTypes.string,
expandedImage: PropTypes.string,
intl: intlShape.isRequired,
isPenderCard: PropTypes.bool,
isVideoFile: PropTypes.bool,
superAdminMask: PropTypes.bool,
currentUserRole: PropTypes.string.isRequired,
projectMedia: PropTypes.shape({
id: PropTypes.string.isRequired,
show_warning_cover: PropTypes.bool.isRequired,
dynamic_annotation_flag: PropTypes.object.isRequired,
}),
intl: intlShape.isRequired,
superAdminMask: PropTypes.bool,
};

AspectRatio.defaultProps = {
Expand Down
84 changes: 84 additions & 0 deletions src/app/components/layout/ContentWarningMessage.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
import React from 'react';
import { FormattedHTMLMessage, defineMessages, injectIntl, intlShape } from 'react-intl';
import PropTypes from 'prop-types';

// Localized display names for the content-warning categories. The keys
// ('adult', 'medical', 'spam', 'violence') are looked up by `warningCategory`
// below; an unknown category falls back to its raw string value.
const messages = defineMessages({
  adult: {
    id: 'contentScreen.adult',
    defaultMessage: 'Adult',
    description: 'Content warning type: Adult',
  },
  medical: {
    id: 'contentScreen.medical',
    defaultMessage: 'Medical',
    description: 'Content warning type: Medical',
  },
  spam: {
    id: 'contentScreen.spam',
    defaultMessage: 'Spam',
    description: 'Content warning type: Spam',
  },
  violence: {
    id: 'contentScreen.violence',
    defaultMessage: 'Violence',
    description: 'Content warning type: Violence',
  },
});

const ContentWarningMessage = ({
intl,
warningCategory,
warningCreator,
}) => {
let message;
if (warningCreator === 'Alegre') {
message = (
<FormattedHTMLMessage
defaultMessage="An automation rule has detected this content as sensitive"
description="Content warning displayed over sensitive content detected by an automation rule"
id="contentScreen.warningByAutomationRule"
tagName="p"
/>
);
} else if (warningCreator === 'Smooch Bot' || !warningCreator) {
message = (
<FormattedHTMLMessage
defaultMessage="This content has been flagged as <strong>SPAM</strong> because the user was blocked due to sending excessive messages."
description="Content warning displayed over sensitive content flagged as SPAM by Smooch Bot"
id="contentScreen.warningBySmoochBot"
tagName="p"
/>
);
} else {
message = (
<FormattedHTMLMessage
defaultMessage="<strong>{user_name}</strong> has detected this content as <strong>{warning_category}</strong>"
description="Content warning displayed over sensitive content detected by a specific user"
id="contentScreen.warning"
tagName="p"
values={{
user_name: warningCreator,
warning_category: (
(messages[warningCategory] && intl.formatMessage(messages[warningCategory])) ||
warningCategory
),
}}
/>
);
}

return message;
};

ContentWarningMessage.propTypes = {
  intl: intlShape.isRequired,
  // Warning category key (e.g. 'adult', 'spam'); presumably one of the keys
  // of `messages`, but any string is accepted and shown as-is.
  warningCategory: PropTypes.string,
  // Name of the user/bot that flagged the content; '' when unknown.
  warningCreator: PropTypes.string,
};

ContentWarningMessage.defaultProps = {
  warningCreator: '',
  warningCategory: '',
};

// Wrapped with injectIntl so `intl` is supplied from the IntlProvider context.
export default injectIntl(ContentWarningMessage);
41 changes: 41 additions & 0 deletions src/app/components/layout/ContentWarningMessage.test.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
import React from 'react';
import ContentWarningMessage from './ContentWarningMessage';
import { mountWithIntl } from '../../../../test/unit/helpers/intl-test';

describe('ContentWarningMessage', () => {
it('should render message for content detected by automated rule', () => {
const wrapper = mountWithIntl(<ContentWarningMessage
intl={{}}
warningCategory="adult"
warningCreator="Alegre"
/>);
expect(wrapper.text()).toMatch(/An automation rule has detected this content as sensitive/);
});

it('should render message for content flagged as SPAM by Smooch Bot', () => {
const wrapper = mountWithIntl(<ContentWarningMessage
intl={{}}
warningCategory="spam"
warningCreator="Smooch Bot"
/>);
expect(wrapper.text()).toMatch(/This content has been flagged as SPAM because the user was blocked due to sending excessive messages/);
});

it('should render message for content flagged as SPAM with no warning creator', () => {
const wrapper = mountWithIntl(<ContentWarningMessage
intl={{}}
warningCategory="spam"
warningCreator=""
/>);
expect(wrapper.text()).toMatch(/This content has been flagged as SPAM because the user was blocked due to sending excessive messages/);
});

it('should render message for content detected by a specific user', () => {
const wrapper = mountWithIntl(<ContentWarningMessage
intl={{}}
warningCategory="violence"
warningCreator="test user"
/>);
expect(wrapper.text()).toMatch(/test user has detected this content as Violence/);
});
});
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ describe('<ArticleCard />', () => {
it('should render ArticleCard component', () => {
const wrapper = shallow(<ArticleCard
date={1687921388}
handleClick={() => {}}
statusLabel="Test"
title="Test"
/>);
Expand Down

0 comments on commit 18b67cd

Please sign in to comment.