Update dependencies
Quartermaster committed May 20, 2022
1 parent 98f0598 commit 6c43a0c
Showing 5 changed files with 21 additions and 134 deletions.
2 changes: 1 addition & 1 deletion VERSION
@@ -1 +1 @@
4.4-143-g82ffe2b42 (2022-05-20 11:52:57)
4.4-144-g98f05980b (2022-05-20 13:47:05)
2 changes: 1 addition & 1 deletion _version.py
@@ -1 +1 @@
__version__ = "4.4"
__version__ = "4.5"
90 changes: 0 additions & 90 deletions models/tasks.py
@@ -176,96 +176,6 @@ def cap_ftp_sync(user_id=None):

tasks["cap_ftp_sync"] = cap_ftp_sync

# -----------------------------------------------------------------------------
if has_module("doc"):

    # -----------------------------------------------------------------------------
    def document_create_index(document, user_id=None):

        import os
        from xlrd import open_workbook
        from pyth.plugins.rtf15.reader import Rtf15Reader
        from pyth.plugins.plaintext.writer import PlaintextWriter
        import sunburnt

        document = json.loads(document)
        table = s3db.doc_document
        id = document["id"]

        name = document["name"]
        filename = document["filename"]

        filename = "%s/%s/uploads/%s" % (os.path.abspath("applications"), \
                                         request.application, filename)

        si = sunburnt.SolrInterface(settings.get_base_solr_url())

        extension = os.path.splitext(filename)[1][1:]

        if extension == "pdf":
            data = os.popen("pdf2txt.py " + filename).read()
        elif extension == "doc":
            data = os.popen("antiword " + filename).read()
        elif extension == "xls":
            wb = open_workbook(filename)
            data = " "
            for s in wb.sheets():
                for row in range(s.nrows):
                    values = []
                    for col in range(s.ncols):
                        values.append(str(s.cell(row, col).value))
                    data = data + ",".join(values) + "\n"
        elif extension == "rtf":
            doct = Rtf15Reader.read(open(filename))
            data = PlaintextWriter.write(doct).getvalue()
        else:
            data = os.popen("strings " + filename).read()

        # The text needs to be in unicode or ascii, with no control characters
        data = str(unicode(data, errors="ignore"))
        data = "".join(c if ord(c) >= 32 else " " for c in data)

        # Put the data according to the Multiple Fields
        # @ToDo: Also, would change this according to requirement of Eden
        document = {"id": str(id), # doc_document.id
                    "name": data, # the data of the file
                    "url": filename, # the encoded file name stored in uploads/
                    "filename": name, # the filename actually uploaded by the user
                    "filetype": extension # x.pdf -> pdf is the extension of the file
                    }

        # Add and commit Indices
        si.add(document)
        si.commit()
        # After Indexing, set the value for has_been_indexed to True in the database
        db(table.id == id).update(has_been_indexed = True)

        db.commit()

    tasks["document_create_index"] = document_create_index

    # -----------------------------------------------------------------------------
    def document_delete_index(document, user_id=None):

        import sunburnt

        document = json.loads(document)
        table = s3db.doc_document
        id = document["id"]
        filename = document["filename"]

        si = sunburnt.SolrInterface(settings.get_base_solr_url())

        # Delete and Commit the indices of the deleted document
        si.delete(id)
        si.commit()
        # After removing the index, set has_been_indexed value to False in the database
        db(table.id == id).update(has_been_indexed = False)

        db.commit()

    tasks["document_delete_index"] = document_delete_index

# -----------------------------------------------------------------------------
if has_module("msg"):

56 changes: 16 additions & 40 deletions optional_requirements.txt
@@ -1,62 +1,38 @@
# Warning: XLSWriter unresolved dependency: xlwt required for XLS export
xlwt>=0.7.2
# Warning: XLSXWriter unresolved dependency: openpyxl required for XLSX export and import
openpyxl>=3.0.9
# Warning: GIS unresolved dependency: geopy required for Geocoder support
geopy>=2.0.0
# Warning: GIS unresolved dependency: shapely required for GIS support
Shapely>=1.2.14 #shapely
Shapely>=1.7.0
# Warning: S3PDF unresolved dependency: Python Imaging required for PDF export
Pillow>=6.2.2 #from PIL import Image
# Warning: GIS unresolved dependency: GDAL required for Shapefile support
GDAL>=1.9.0 #from osgeo import ogr
# Warning: GIS unresolved dependency: geopy required for Geocoder support
geopy>=1.18.1 #from geopy import geocoders
Pillow>=8.4.0
# Warning: S3PDF unresolved dependency: reportlab required for PDF export
reportlab>=3.6
reportlab>=3.6.8
# Warning: XLSWriter unresolved dependency: xlwt required for XLS export
xlwt>=1.3.0
# Warning: XLSWriter unresolved dependency: xlrd required for XLS import
xlrd>=1.2.0
# Warning: GIS unresolved dependency: GDAL required for Shapefile support
GDAL>=3.4.1
# Warning: S3Msg unresolved dependency: pyserial required for Serial port modem usage
pyserial>=2.6
# Warning: S3Msg unresolved dependency: tweepy required for non-Tropo Twitter support
tweepy>=1.9
# Warning: XLSWriter unresolved dependency: xlrd required for XLS export and import
xlrd>=0.7.1
# Warning: XLSWriter unresolved dependency: openpyxl required for XLSX import
openpyxl>=3.0.7
# Warning: S3MSG unresolved dependency: sgmllib3k required for Feed import on Python 3.x
sgmllib3k>=1.0.0
# Warning: GIS unresolved dependency: selenium required for Map printing support
selenium>=2.23.0
# Warning: S3Doc unresolved dependency: sunburnt required for Full-Text Search support
sunburnt>=0.6
# Warning: S3Doc unresolved dependency: pyth required for RTF document support in Full-Text Search
pyth>=0.5.6
# Warning: S3Chart unresolved dependency: matplotlib required for charting in Survey module
matplotlib>=1.1.1
# Warning: Survey unresolved dependency: PyRTF required if you want to export assessment/survey templates as a Word document
PyRTF>=0.45
# Warning: Message Parsing unresolved dependency: TwitterSearch required for fetching results from twitter keyword queries
TwitterSearch>=1.0
# Warning: Messaging unresolved dependency: requests required for posting to Facebook
requests>=2.3.0
# Warning: Setup unresolved dependency: ansible required for Setup Module
ansible>=2.9.3
# Warning: Setup unresolved dependency: PyYAML required for Setup Module
PyYAML>=5.1 #yaml
# Warning: Setup unresolved dependency: ruamel.yaml required for Setup Module
#ruamel.yaml>=0.15.35
# Warning: Setup unresolved dependency: arrow used for some HRM functionality for IFRC
#arrow>=0.10.0
# Warning: Setup unresolved dependency: boto required for Setup Module to be able to manage AWS EC2 instances
boto>=2.49.0
# Warning: Setup unresolved dependency: boto required for Setup Module to be able to manage AWS EC2 keypairs
boto3>=1.11.4
# Warning: Setup unresolved dependency: openstacksdk required for Setup Module to be able to manage OpenStack instances
openstacksdk>=0.41.0
# Warning: Setup unresolved dependency: dnspython required for Setup Module to be able to do DNS Lookups
dnspython>=1.16.0
# Warning: Setup unresolved dependency: google-api-python-client required to be able to manage Google Email Groups
google-api-python-client>=1.7.11
# Warning: S3 unresolved dependency: pyparsing required for Calendar Support and Advanced URL Queries
pyparsing>=2.0.1
pyparsing>=2.2.0
# Warning: URL shortener unresolved dependency: pyshorteners required to shorten the url (currently used by SAMBRO)
#pyshorteners>=0.6.1
# Warning: S3Doc unresolved dependency: docx-mailmerge required to merge into docx templates (currently used by DRKCM)
#docx-mailmerge>=0.4.0
# Warning: S3Doc unresolved dependency: docx-mailmerge required to merge into docx templates
#docx-mailmerge>=0.5.0
# Warning: S3Translate unresolved dependency: translate-toolkit required for Pootle support
translate-toolkit>=1.0.1
5 changes: 3 additions & 2 deletions requirements.txt
@@ -1,2 +1,3 @@
python-dateutil>=2.1 #dateutil
lxml>=2.2.8
python-dateutil>=2.7.3
lxml>=4.4.2
requests>=2.26.0
