diff --git a/.travis.yml b/.travis.yml
index b5557c6..3e7b6a3 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -17,7 +17,7 @@ install:
#- sudo add-apt-repository -y ppa:texlive-backports/ppa
#- sudo apt-get -y update
#- sudo apt-get -y install texlive-full
-- wget http://ftp.stack.nl/pub/users/dimitri/doxygen-1.8.7.linux.bin.tar.gz
+- wget https://ftp.stack.nl/pub/users/dimitri/doxygen-1.8.7.linux.bin.tar.gz
- tar -zxvf doxygen-1.8.7.linux.bin.tar.gz -C ${TRAVIS_BUILD_DIR}/tools
- sudo apt-get install -y rpm
before_script:
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 136a5f6..27a7457 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -107,10 +107,10 @@ if(CMAKE_COMPILER_IS_GNUCC)
OUTPUT_STRIP_TRAILING_WHITESPACE)
# A useful summary of warning options can be found here:
- # http://developer.apple.com/tools/xcode/compilercodewarnings.html
+ # https://developer.apple.com/tools/xcode/compilercodewarnings.html
# Note: gcc does not implicitly set _POSIX_C_SOURCE or _XOPEN_SOURCE
# when using -std=c99.
- # http://pubs.opengroup.org/onlinepubs/9699919799/functions/V2_chap02.html#tag_15_02_01_01
+ # https://pubs.opengroup.org/onlinepubs/9699919799/functions/V2_chap02.html#tag_15_02_01_01
# We specify that we are POSIX.1-2001 compliant and XSI-conforming. We only
# need to specify _XOPEN_SOURCE as _POSIX_C_SOURCE will be set implicitly.
set(CMAKE_C_FLAGS "-std=c99 -pedantic -Wall -Wextra -D_XOPEN_SOURCE=600"
diff --git a/CONTRIBUTIONS.md b/CONTRIBUTIONS.md
index 3da5037..ab8374c 100644
--- a/CONTRIBUTIONS.md
+++ b/CONTRIBUTIONS.md
@@ -3,7 +3,7 @@ Contributing to PDLTools
If you're a Pivotal employee and would like to contribute to PDLTools, this guide is for you. Following these step-by-step instructions you should be able to easily add your module to PDLTools.
1. Since you may not have push access to the master repo, fork the base repo [pivotalsoftware/PDLTools](https://github.com/pivotalsoftware/PDLTools), into your own account on GitHub.
-2. Clone your forked repo into a VM. You can download the GPDB sandbox VM here: [GPDB Sandbox](https://network.pivotal.io/products/pivotal-gpdb#/releases/567/file_groups/337). Make sure you create an account on [PivNet](http://network.pivotal.io). You can get the latest GPDB sandbox VMs by going directly to [greenplum.org](http://greenplum.org)
+2. Clone your forked repo into a VM. You can download the GPDB sandbox VM here: [GPDB Sandbox](https://network.pivotal.io/products/pivotal-gpdb#/releases/567/file_groups/337). Make sure you create an account on [PivNet](https://network.pivotal.io). You can get the latest GPDB sandbox VMs by going directly to [greenplum.org](https://greenplum.org)
3. Create a branch to keep track of your contribution: `git checkout -b my_contribution`
4. Look at one of the more recent contributions such as [kd-tree](https://github.com/pivotalsoftware/PDLTools/pull/11/commits/84dcf00b72c5d4a9f11b299d7fa8b3d3b02010c7) to get an idea of all the files you'll have to touch to include your contribution. You can also look at the [sample_contribution_kl_divergence](https://github.com/pivotalsoftware/PDLTools/commit/9a0980a1b2b64a1a04c7ecfa76b233273779d191) commit to get a high level idea of what a contribution entails. Your contribution should include unit tests to validate the functionalities in your module. Also ensure your contribution is well documented. You can navigate to the `$BUILD/doc/user/html/index.html` or `$BUILD/doc/user/latex/refman.pdf` files in your local repo to check if the documentation for your contribution is appearing as expected on Doxygen docs.
5. Commit your changes to your branch (ex: `my_contribution`) on your GitHub account.
@@ -14,7 +14,7 @@ If you're a Pivotal employee and would like to contribute to PDLTools, this guid

The committers to PDLTools will see the following:

-8. The committers to pivotalsoftware/PDLTools will then merge your contribution to the base fork and voila, you should be able to see your contribution on [PDLTools User Docs](http://pivotalsoftware.github.io/PDLTools/). When a release is eventually created off the main branch, the installers for that release will contain your module.
+8. The committers to pivotalsoftware/PDLTools will then merge your contribution to the base fork and voila, you should be able to see your contribution on [PDLTools User Docs](https://pivotalsoftware.github.io/PDLTools/). When a release is eventually created off the main branch, the installers for that release will contain your module.
Creating Releases
=================
diff --git a/README.md b/README.md
index bff0fb1..f6964af 100644
--- a/README.md
+++ b/README.md
@@ -8,7 +8,7 @@ PDL Tools
Usage docs
============
-http://pivotalsoftware.github.io/PDLTools/
+https://pivotalsoftware.github.io/PDLTools/
Binaries (Pivotal internal)
============================
@@ -23,7 +23,7 @@ The following are the pre-requisites for building PDLTools:
Required:
* Pivotal Greenplum or Apache HAWQ ([GPDB sandbox](https://network.pivotal.io/products/pivotal-gpdb), [HAWQ sandbox](https://network.pivotal.io/products/pivotal-hdb))
-* Apache MADlib ([Download](http://madlib.incubator.apache.org/download.html))
+* Apache MADlib ([Download](https://madlib.incubator.apache.org/download.html))
* cmake (3.5 recommended)
* GNU C and C++ compilers (gcc, g++)
* Flex (>= 2.5.33)
diff --git a/deploy/PGXN/CMakeLists.txt b/deploy/PGXN/CMakeLists.txt
index c43e755..6692ca2 100644
--- a/deploy/PGXN/CMakeLists.txt
+++ b/deploy/PGXN/CMakeLists.txt
@@ -1,5 +1,5 @@
# ------------------------------------------------------------------------------
-# Packaging for the PostgreSQL Extension Network (PGXN), http://pgxn.org
+# Packaging for the PostgreSQL Extension Network (PGXN), https://pgxn.org
# ------------------------------------------------------------------------------
set(PDLTOOLS_PGXN_RELEASE_NUMBER 1)
diff --git a/deploy/PGXN/META.json.in b/deploy/PGXN/META.json.in
index 0b262fa..b70bc3a 100644
--- a/deploy/PGXN/META.json.in
+++ b/deploy/PGXN/META.json.in
@@ -30,6 +30,6 @@
"meta-spec": {
"version": "1.0.0",
- "url": "http://pgxn.org/meta/spec.txt"
+ "url": "https://pgxn.org/meta/spec.txt"
}
}
diff --git a/deploy/PackageMaker/Welcome.html b/deploy/PackageMaker/Welcome.html
index 9c70ffd..5b8ab7e 100644
--- a/deploy/PackageMaker/Welcome.html
+++ b/deploy/PackageMaker/Welcome.html
@@ -1,5 +1,5 @@
- "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+ "https://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
diff --git a/deploy/gppkg/gppkg_spec.yml.in b/deploy/gppkg/gppkg_spec.yml.in
index b756531..396e1fc 100644
--- a/deploy/gppkg/gppkg_spec.yml.in
+++ b/deploy/gppkg/gppkg_spec.yml.in
@@ -24,5 +24,5 @@ PostInstall:
echo '$ pdlpack --help';
echo 'Release notes and additional documentation can be found at';
echo 'https://sites.google.com/a/pivotal.io/global-data-science/pdl-tools,';
- echo 'as well as on http://pdl-tools.pa.pivotal.io/';
+ echo 'as well as on https://pdl-tools.pa.pivotal.io/';
echo 'accessible through the Pivotal VPN.';"
diff --git a/doc/bin/doxypy.py b/doc/bin/doxypy.py
index 649281b..75576cb 100755
--- a/doc/bin/doxypy.py
+++ b/doc/bin/doxypy.py
@@ -36,7 +36,7 @@
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with this program. If not, see <http://www.gnu.org/licenses/>.
+along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
import sys
diff --git a/doc/etc/header.html b/doc/etc/header.html
index 1f76724..8dc6f80 100644
--- a/doc/etc/header.html
+++ b/doc/etc/header.html
@@ -1,5 +1,5 @@
-
+
diff --git a/doc/etc/user.doxyfile.in b/doc/etc/user.doxyfile.in
index a5046ec..36b91bc 100644
--- a/doc/etc/user.doxyfile.in
+++ b/doc/etc/user.doxyfile.in
@@ -20,7 +20,7 @@
# This tag specifies the encoding used for all characters in the config file
# that follow. The default is UTF-8 which is also the encoding used for all text
# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv
-# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv
+# built into libc) for the transcoding. See https://www.gnu.org/software/libiconv
# for the list of possible encodings.
# The default value is: UTF-8.
@@ -308,7 +308,7 @@ EXTENSION_MAPPING = py_in=python \
# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments
# according to the Markdown format, which allows for more readable
-# documentation. See http://daringfireball.net/projects/markdown/ for details.
+# documentation. See https://daringfireball.net/projects/markdown/ for details.
# The output of markdown processing is further processed by doxygen, so you can
# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in
# case of backward compatibilities issues.
@@ -341,7 +341,7 @@ BUILTIN_STL_SUPPORT = NO
CPP_CLI_SUPPORT = NO
# Set the SIP_SUPPORT tag to YES if your project consists of sip (see:
-# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen
+# https://riverbankcomputing.com/software/sip/intro) sources only. Doxygen
# will parse them like normal C++ but will assume all classes use public instead
# of private inheritance when no explicit protection keyword is present.
# The default value is: NO.
@@ -700,7 +700,7 @@ LAYOUT_FILE = DoxygenLayout.xml
# The CITE_BIB_FILES tag can be used to specify one or more bib files containing
# the reference definitions. This must be a list of .bib files. The .bib
# extension is automatically appended if omitted. This requires the bibtex tool
-# to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info.
+# to be installed. See also https://en.wikipedia.org/wiki/BibTeX for more info.
# For LaTeX the style of the bibliography can be controlled using
# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the
# search path. Do not use file names with spaces, bibtex cannot handle them. See
@@ -782,7 +782,7 @@ INPUT = @DOXYGEN_INPUT_USER@
# This tag can be used to specify the character encoding of the source files
# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
# libiconv (or the iconv built into libc) for the transcoding. See the libiconv
-# documentation (see: http://www.gnu.org/software/libiconv) for the list of
+# documentation (see: https://www.gnu.org/software/libiconv) for the list of
# possible encodings.
# The default value is: UTF-8.
@@ -981,7 +981,7 @@ SOURCE_TOOLTIPS = YES
# If the USE_HTAGS tag is set to YES then the references to source code will
# point to the HTML generated by the htags(1) tool instead of doxygen built-in
# source browser. The htags tool is part of GNU's global source tagging system
-# (see http://www.gnu.org/software/global/global.html). You will need version
+# (see https://www.gnu.org/software/global/global.html). You will need version
# 4.8.6 or higher.
#
# To use it do the following:
@@ -1009,7 +1009,7 @@ USE_HTAGS = NO
VERBATIM_HEADERS = NO
# If the CLANG_ASSISTED_PARSING tag is set to YES, then doxygen will use the
-# clang parser (see: http://clang.llvm.org/) for more accurate parsing at the
+# clang parser (see: https://clang.llvm.org/) for more accurate parsing at the
# cost of reduced performance. This can be particularly helpful with template
# rich C++ code for which doxygen's built-in parser lacks the necessary type
# information.
@@ -1143,7 +1143,7 @@ HTML_EXTRA_FILES =
# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen
# will adjust the colors in the stylesheet and background images according to
# this color. Hue is specified as an angle on a colorwheel, see
-# http://en.wikipedia.org/wiki/Hue for more information. For instance the value
+# https://en.wikipedia.org/wiki/Hue for more information. For instance the value
# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300
# purple, and 360 is red again.
# Minimum value: 0, maximum value: 359, default value: 220.
@@ -1201,12 +1201,12 @@ HTML_INDEX_NUM_ENTRIES = 100
# If the GENERATE_DOCSET tag is set to YES, additional index files will be
# generated that can be used as input for Apple's Xcode 3 integrated development
-# environment (see: http://developer.apple.com/tools/xcode/), introduced with
+# environment (see: https://developer.apple.com/tools/xcode/), introduced with
# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a
# Makefile in the HTML output directory. Running make will produce the docset in
# that directory and running make install will install the docset in
# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at
-# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html
+# startup. See https://developer.apple.com/tools/creatingdocsetswithdoxygen.html
# for more information.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.
@@ -1246,7 +1246,7 @@ DOCSET_PUBLISHER_NAME = Publisher
# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
# additional HTML index files: index.hhp, index.hhc, and index.hhk. The
# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop
-# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on
+# (see: https://www.microsoft.com/en-us/download/details.aspx?id=21138) on
# Windows.
#
# The HTML Help Workshop contains a compiler that can convert all HTML output
@@ -1322,7 +1322,7 @@ QCH_FILE =
# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
# Project output. For more information please see Qt Help Project / Namespace
-# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace).
+# (see: https://www.qt-project.org/doc/qt-4.8/qthelpproject.html#namespace).
# The default value is: org.doxygen.Project.
# This tag requires that the tag GENERATE_QHP is set to YES.
@@ -1330,7 +1330,7 @@ QHP_NAMESPACE =
# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt
# Help Project output. For more information please see Qt Help Project / Virtual
-# Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual-
+# Folders (see: https://www.qt-project.org/doc/qt-4.8/qthelpproject.html#virtual-
# folders).
# The default value is: doc.
# This tag requires that the tag GENERATE_QHP is set to YES.
@@ -1339,7 +1339,7 @@ QHP_VIRTUAL_FOLDER = doc
# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
# filter to add. For more information please see Qt Help Project / Custom
-# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
+# Filters (see: https://www.qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
# filters).
# This tag requires that the tag GENERATE_QHP is set to YES.
@@ -1347,7 +1347,7 @@ QHP_CUST_FILTER_NAME =
# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
# custom filter to add. For more information please see Qt Help Project / Custom
-# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
+# Filters (see: https://www.qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
# filters).
# This tag requires that the tag GENERATE_QHP is set to YES.
@@ -1355,7 +1355,7 @@ QHP_CUST_FILTER_ATTRS =
# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
# project's filter section matches. Qt Help Project / Filter Attributes (see:
-# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes).
+# https://www.qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes).
# This tag requires that the tag GENERATE_QHP is set to YES.
QHP_SECT_FILTER_ATTRS =
@@ -1460,7 +1460,7 @@ FORMULA_FONTSIZE = 10
FORMULA_TRANSPARENT = YES
# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
-# http://www.mathjax.org) which uses client side Javascript for the rendering
+# https://www.mathjax.org) which uses client side Javascript for the rendering
# instead of using prerendered bitmaps. Use this if you do not have LaTeX
# installed or if you want to formulas look prettier in the HTML output. When
# enabled you may also need to install MathJax separately and configure the path
@@ -1472,7 +1472,7 @@ USE_MATHJAX = YES
# When MathJax is enabled you can set the default output format to be used for
# the MathJax output. See the MathJax site (see:
-# http://docs.mathjax.org/en/latest/output.html) for more details.
+# https://docs.mathjax.org/en/latest/output.html) for more details.
# Possible values are: HTML-CSS (which is slower, but has the best
# compatibility), NativeMML (i.e. MathML) and SVG.
# The default value is: HTML-CSS.
@@ -1487,11 +1487,11 @@ MATHJAX_FORMAT = HTML-CSS
# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax
# Content Delivery Network so you can quickly see the result without installing
# MathJax. However, it is strongly recommended to install a local copy of
-# MathJax from http://www.mathjax.org before deployment.
-# The default value is: http://cdn.mathjax.org/mathjax/latest.
+# MathJax from https://www.mathjax.org before deployment.
+# The default value is: https://cdn.mathjax.org/mathjax/latest.
# This tag requires that the tag USE_MATHJAX is set to YES.
-MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest
+MATHJAX_RELPATH = https://cdn.mathjax.org/mathjax/latest
# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax
# extension names that should be enabled during MathJax rendering. For example
@@ -1502,7 +1502,7 @@ MATHJAX_EXTENSIONS =
# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
# of code that will be used on startup of the MathJax code. See the MathJax site
-# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an
+# (see: https://docs.mathjax.org/en/latest/output.html) for more details. For an
# example see the documentation.
# This tag requires that the tag USE_MATHJAX is set to YES.
@@ -1549,7 +1549,7 @@ SERVER_BASED_SEARCH = NO
#
# Doxygen ships with an example indexer ( doxyindexer) and search engine
# (doxysearch.cgi) which are based on the open source search engine library
-# Xapian (see: http://xapian.org/).
+# Xapian (see: https://xapian.org/).
#
# See the section "External Indexing and Searching" for details.
# The default value is: NO.
@@ -1562,7 +1562,7 @@ EXTERNAL_SEARCH = NO
#
# Doxygen ships with an example indexer ( doxyindexer) and search engine
# (doxysearch.cgi) which are based on the open source search engine library
-# Xapian (see: http://xapian.org/). See the section "External Indexing and
+# Xapian (see: https://xapian.org/). See the section "External Indexing and
# Searching" for details.
# This tag requires that the tag SEARCHENGINE is set to YES.
@@ -1736,7 +1736,7 @@ LATEX_SOURCE_CODE = NO
# The LATEX_BIB_STYLE tag can be used to specify the style to use for the
# bibliography, e.g. plainnat, or ieeetr. See
-# http://en.wikipedia.org/wiki/BibTeX and \cite for more info.
+# https://en.wikipedia.org/wiki/BibTeX and \cite for more info.
# The default value is: plain.
# This tag requires that the tag GENERATE_LATEX is set to YES.
@@ -1892,7 +1892,7 @@ DOCBOOK_OUTPUT = docbook
#---------------------------------------------------------------------------
# If the GENERATE_AUTOGEN_DEF tag is set to YES doxygen will generate an AutoGen
-# Definitions (see http://autogen.sf.net) file that captures the structure of
+# Definitions (see https://autogen.sourceforge.net/) file that captures the structure of
# the code including all documentation. Note that this feature is still
# experimental and incomplete at the moment.
# The default value is: NO.
@@ -2103,7 +2103,7 @@ HIDE_UNDOC_RELATIONS = YES
# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
# available from the path. This tool is part of Graphviz (see:
-# http://www.graphviz.org/), a graph visualization toolkit from AT&T and Lucent
+# https://www.graphviz.org/), a graph visualization toolkit from AT&T and Lucent
# Bell Labs. The other options in this section have no effect if this option is
# set to NO
# The default value is: NO.
diff --git a/doc/installpage.dox.in b/doc/installpage.dox.in
index 133a804..e14bb63 100644
--- a/doc/installpage.dox.in
+++ b/doc/installpage.dox.in
@@ -72,7 +72,7 @@ For additional options run:
$ pdlpack --help
Release notes and additional documentation can be found at
https://sites.google.com/a/pivotal.io/global-data-science/pdl-tools,
-as well as on http://pdl-tools.pa.pivotal.io/
+as well as on https://pdl-tools.pa.pivotal.io/
accessible through the Pivotal VPN.
20140826:07:05:23:015733 gppkg:localhost:gpadmin-[INFO]:-pdltools-@PDLTOOLS_VERSION_STRING@-gpdb4.3-
rhel5-x86_64.gppkg successfully installed.
diff --git a/doc/mainpage.dox.in b/doc/mainpage.dox.in
index d1671a7..513ec51 100644
--- a/doc/mainpage.dox.in
+++ b/doc/mainpage.dox.in
@@ -246,7 +246,7 @@ A collection of machine-learning algorithms.
merging closest clusters. Complete-linkage clustering defines the distance
between two clusters as the distance between the farthest pair of data points
in the two clusters
- (See http://en.wikipedia.org/wiki/Complete-linkage_clustering).
+ (See https://en.wikipedia.org/wiki/Complete-linkage_clustering).
(Written by [Jin Yu](mailto:jyu@pivotal.io), 22 Sep 2014.)
@@ -264,7 +264,7 @@ A collection of machine-learning algorithms.
@brief KD-tree and its application to k-nearest neighbour search
Construction of a KD-tree that organizes multi-dimensional data in a binary tree
- (http://en.wikipedia.org/wiki/K-d_tree). The data is recursively partitioned at the
+ (https://en.wikipedia.org/wiki/K-d_tree). The data is recursively partitioned at the
median value along the dimension of maximum variance. KD-tree can be used for
efficient nearest neighbour search in a low dimensional space (e.g. dimensionality < 10).
The search function is intended for large-scale problems, e.g. finding nearest neighbours
diff --git a/doc/src/sql.ll b/doc/src/sql.ll
index e0a62b4..e04bd21 100644
--- a/doc/src/sql.ll
+++ b/doc/src/sql.ll
@@ -247,7 +247,7 @@ FLOATING_POINT_LITERAL ([[:digit:]]+"."[[:digit:]]*|"."[[:digit:]]+){EXPONENT}?|
}
{DOLLARQUOTE} {
/* String literals in dollar quotes, see
- http://www.postgresql.org/docs/current/static/sql-syntax-lexical.html#SQL-SYNTAX-DOLLAR-QUOTING */
+ https://www.postgresql.org/docs/current/static/sql-syntax-lexical.html#SQL-SYNTAX-DOLLAR-QUOTING */
stringLiteralQuotation = static_cast( malloc(yyleng - 1) );
strncpy(stringLiteralQuotation, yytext + 1, yyleng - 1);
yy_push_state(sDOLLAR_STRING_LITERAL);
diff --git a/src/modules/nlp/PorterStemmer.c b/src/modules/nlp/PorterStemmer.c
index 5598fce..42ff5d7 100644
--- a/src/modules/nlp/PorterStemmer.c
+++ b/src/modules/nlp/PorterStemmer.c
@@ -9,7 +9,7 @@
only differing from it at the points maked --DEPARTURE-- below.
- See also http://www.tartarus.org/~martin/PorterStemmer
+ See also https://www.tartarus.org/~martin/PorterStemmer
The algorithm as described in the paper could be exactly replicated
by adjusting the points of DEPARTURE, but this is barely necessary,
diff --git a/src/ports/greenplum/cmake/FindPostgreSQL.cmake b/src/ports/greenplum/cmake/FindPostgreSQL.cmake
index df66a8e..3ac71ae 100644
--- a/src/ports/greenplum/cmake/FindPostgreSQL.cmake
+++ b/src/ports/greenplum/cmake/FindPostgreSQL.cmake
@@ -40,7 +40,7 @@
# Distributed under the BSD-License.
# According to
-# http://www.cmake.org/files/v2.8/CMakeChangeLog-2.8.3
+# https://www.cmake.org/files/v2.8/CMakeChangeLog-2.8.3
# the form of find_package_handle_standard_args we are using requires
# cmake >= 2.8.3
cmake_minimum_required(VERSION 2.8.3 FATAL_ERROR)
diff --git a/src/ports/greenplum/modules/complete_linkage/complete_linkage.sql_in b/src/ports/greenplum/modules/complete_linkage/complete_linkage.sql_in
index 1f7672c..3675618 100755
--- a/src/ports/greenplum/modules/complete_linkage/complete_linkage.sql_in
+++ b/src/ports/greenplum/modules/complete_linkage/complete_linkage.sql_in
@@ -24,7 +24,7 @@
@about
-This function implements the commonly used complete-linkage hierarchical clustering algorithm. It constructs a hierarchical clustering tree from bottom up by merging closest clusters. Complete-linkage clustering defines the distance between two clusters as the distance between the farthest pair of data points in the two clusters (http://en.wikipedia.org/wiki/Complete-linkage_clustering). Given a hierarchical clustering tree, one can cut the tree at a certain height to obtain flat clusters of data points (cf. \ref grp_cut_hclust_tree).
+This function implements the commonly used complete-linkage hierarchical clustering algorithm. It constructs a hierarchical clustering tree from bottom up by merging closest clusters. Complete-linkage clustering defines the distance between two clusters as the distance between the farthest pair of data points in the two clusters (https://en.wikipedia.org/wiki/Complete-linkage_clustering). Given a hierarchical clustering tree, one can cut the tree at a certain height to obtain flat clusters of data points (cf. \ref grp_cut_hclust_tree).
@anchor complete_linkage_syntax
@par Syntax
diff --git a/src/ports/greenplum/modules/generic_utilities/generic_utilities.sql_in b/src/ports/greenplum/modules/generic_utilities/generic_utilities.sql_in
index 1af9424..1fe7179 100644
--- a/src/ports/greenplum/modules/generic_utilities/generic_utilities.sql_in
+++ b/src/ports/greenplum/modules/generic_utilities/generic_utilities.sql_in
@@ -70,7 +70,7 @@ RETURNS text;
@usage
The multitbl_summary() function utilizes MADlib's summary() function to produce summary statistics for a set of provided tables.
The underlying summary function invokes various methods from the MADlib library to provide the data overview.
-More information about the underlying MADlib function can be found http://madlib.incubator.apache.org/docs/latest/group__grp__summary.html
+More information about the underlying MADlib function can be found https://madlib.incubator.apache.org/docs/latest/group__grp__summary.html
Tables listed in the input array that are not found in the system catalog (e.g. does not exist, no permissions) are excluded from summary.
@@ -254,5 +254,5 @@ $$ LANGUAGE 'plpythonu';
CREATE OR REPLACE FUNCTION PDLTOOLS_SCHEMA.multitbl_summary(usage text)
RETURNS text AS
$$
- return '''PL/python driver to run MADLIB_SCHEMA.summary() on multiple tables. Refer to: http://pivotalsoftware.github.io/PDLTools/modules.html for more details'''
+ return '''PL/python driver to run MADLIB_SCHEMA.summary() on multiple tables. Refer to: https://pivotalsoftware.github.io/PDLTools/modules.html for more details'''
$$ LANGUAGE 'plpythonu';
diff --git a/src/ports/greenplum/modules/grid_search/grid_search.sql_in b/src/ports/greenplum/modules/grid_search/grid_search.sql_in
index d8bc874..65e8839 100644
--- a/src/ports/greenplum/modules/grid_search/grid_search.sql_in
+++ b/src/ports/greenplum/modules/grid_search/grid_search.sql_in
@@ -23,7 +23,7 @@
@about
-A function that run's MADlib's k-Means function with varying values for k and saves the model outputs into one table. See the MADlib's documentation for more details.
+A function that runs MADlib's k-Means function with varying values for k and saves the model outputs into one table. See the MADlib's documentation for more details.
@anchor kmeans_syntax
@par Syntax
@@ -79,7 +79,7 @@ performance reasons.])
squared_dist_norm2: squared Euclidean distance (element-wise mean)
dist_angle: angle (element-wise mean of normalized points)
dist_tanimoto: tanimoto (element-wise mean of normalized points)
-user defined function with signature DOUBLE PRECISION[] x, DOUBLE PRECISION[] y -> DOUBLE PRECISION
See the MADlib's documentation for more details.
+user defined function with signature DOUBLE PRECISION[] x, DOUBLE PRECISION[] y -> DOUBLE PRECISION
See the MADlib's documentation for more details.
@param agg_centroid (optional) The name of the aggregate function used to determine centroids. Default: 'avg'.
The following aggregate functions can be used:
- avg: average (Default)
- normalized_avg: normalized average
@@ -309,7 +309,7 @@ simple_silhouette | 0.877815152206439
@about
-Elastic Nets combine the the benefits of L1 and L2 regularizations with regression models. L1 regularization results in very sparse features, while L2 regularization results in features whose coefficients are closer to zero. By adjusting the alpha and lambda parameters, we can achieve the benefits of both L1 and L2 regularization. Refer to MADlib Elastic Net Regression for more information.
+Elastic Nets combine the benefits of L1 and L2 regularizations with regression models. L1 regularization results in very sparse features, while L2 regularization results in features whose coefficients are closer to zero. By adjusting the alpha and lambda parameters, we can achieve the benefits of both L1 and L2 regularization. Refer to MADlib Elastic Net Regression for more information.
@anchor elasticnet_syntax
@par Syntax
@@ -357,7 +357,7 @@ returns text;
@anchor elasticnet_usage
@usage
-Given an array of alphas and lambdas, the function runs MADlib's Elastic Net Regularization with k-fold cross-validation. The resulting table contains the AUC values for each model iteration. Refer to MADlib Elastic Net Regression for more information.
+Given an array of alphas and lambdas, the function runs MADlib's Elastic Net Regularization with k-fold cross-validation. The resulting table contains the AUC values for each model iteration. Refer to MADlib Elastic Net Regression for more information.
@anchor elasticnet_example
@examp
@@ -510,7 +510,7 @@ $$
(
select
num_clusters,
- --Refer: http://en.wikipedia.org/wiki/Silhouette_(clustering)
+ --Refer: https://en.wikipedia.org/wiki/Silhouette_(clustering)
avg(
CASE WHEN distances[2] = 0 THEN 0
ELSE (distances[2] - distances[1]) / distances[2]
diff --git a/src/ports/greenplum/modules/hits/hits.sql_in b/src/ports/greenplum/modules/hits/hits.sql_in
index 1504e45..ab76c4d 100644
--- a/src/ports/greenplum/modules/hits/hits.sql_in
+++ b/src/ports/greenplum/modules/hits/hits.sql_in
@@ -13,7 +13,7 @@
/**
@addtogroup grp_hits
-@brief Use the HITS alogrithm to calculate authority and hub scores of directed graph (http://en.wikipedia.org/wiki/HITS_algorithm).
+@brief Use the HITS algorithm to calculate authority and hub scores of directed graph (https://en.wikipedia.org/wiki/HITS_algorithm).
Contents
@@ -155,7 +155,7 @@ E | 0 | 0.25000000000000000000 | 3
/**
- * @brief Use the HITS alogrithm to calculate authority and hub scores of directed graph (http://en.wikipedia.org/wiki/HITS_algorithm).
+ * @brief Use the HITS algorithm to calculate authority and hub scores of directed graph (https://en.wikipedia.org/wiki/HITS_algorithm).
*
* @param input_tab Input table that contains the edges of a directed graph.
* @param source_node Column that specifies source nodes.
diff --git a/src/ports/greenplum/modules/kd_tree/kd_tree.sql_in b/src/ports/greenplum/modules/kd_tree/kd_tree.sql_in
index dbe3f4d..1ee1d3c 100644
--- a/src/ports/greenplum/modules/kd_tree/kd_tree.sql_in
+++ b/src/ports/greenplum/modules/kd_tree/kd_tree.sql_in
@@ -23,7 +23,7 @@
@about
-A KD-tree is a data structure that organizes multi-dimensional data in a binary tree (http://en.wikipedia.org/wiki/K-d_tree).
+A KD-tree is a data structure that organizes multi-dimensional data in a binary tree (https://en.wikipedia.org/wiki/K-d_tree).
It is widely used for efficient nearest neighbour search in a low dimensional space (\ref grp_kdtree_knn).
Due to the "curse of dimensionality", the search however becomes less effective as the dimensionality of the
space increases (e.g. > 10). This function creates a KD-tree by recursively partitioning the data at the median
diff --git a/src/ports/greenplum/modules/pagerank/pagerank.sql_in b/src/ports/greenplum/modules/pagerank/pagerank.sql_in
index 718261f..f8084f5 100644
--- a/src/ports/greenplum/modules/pagerank/pagerank.sql_in
+++ b/src/ports/greenplum/modules/pagerank/pagerank.sql_in
@@ -126,7 +126,7 @@ SELECT * FROM pagerank_test_output ORDER BY node;
/**
- * @brief Calculate PageRank of directed graph (http://en.wikipedia.org/wiki/PageRank).
+ * @brief Calculate PageRank of directed graph (https://en.wikipedia.org/wiki/PageRank).
*
* @param input_tab Input table that contains the edges of a directed graph.
* @param source_node Column that specifies source nodes.
diff --git a/src/ports/greenplum/modules/stemming/porter_stemmer.sql_in b/src/ports/greenplum/modules/stemming/porter_stemmer.sql_in
index 2aae854..0de9842 100644
--- a/src/ports/greenplum/modules/stemming/porter_stemmer.sql_in
+++ b/src/ports/greenplum/modules/stemming/porter_stemmer.sql_in
@@ -6,7 +6,7 @@
@author PL/C Wrapper written by Srivatsan Ramanujam
porting original inventor Martin Porter's code
- from http://tartarus.org/martin/PorterStemmer/c_thread_safe.txt
+ from https://tartarus.org/martin/PorterStemmer/c_thread_safe.txt
@date 12 Aug 2014
*//* ----------------------------------------------------------------------- */
diff --git a/src/ports/greenplum/modules/uri_utils/test/test_uri_utils.sql_in b/src/ports/greenplum/modules/uri_utils/test/test_uri_utils.sql_in
index 278ad12..978aea0 100644
--- a/src/ports/greenplum/modules/uri_utils/test/test_uri_utils.sql_in
+++ b/src/ports/greenplum/modules/uri_utils/test/test_uri_utils.sql_in
@@ -9,7 +9,7 @@ as
select (t).*
from
(
- select parse_uri($BODY$http://myself:password@www.Pivotal.io:80/%7ehello/to/you/index.html?who=I&whom=me&more=a%20%22''%5E%5e%41#here$BODY$,false,false) as t
+ select parse_uri($BODY$https://myself:password@www.Pivotal.io:80/%7ehello/to/you/index.html?who=I&whom=me&more=a%20%22''%5E%5e%41#here$BODY$,false,false) as t
)test
)
select (
diff --git a/src/ports/greenplum/modules/uri_utils/uri_utils.sql_in b/src/ports/greenplum/modules/uri_utils/uri_utils.sql_in
index 7047274..ff60cf0 100644
--- a/src/ports/greenplum/modules/uri_utils/uri_utils.sql_in
+++ b/src/ports/greenplum/modules/uri_utils/uri_utils.sql_in
@@ -195,7 +195,7 @@ affected by the question of whether \c "normalize" is true or false.
@verbatim
user=# \x
Expanded display is on.
-user=# SELECT * FROM parse_uri('http://myself:password@www.Pivotal.io:80/%7ehello/to/you/index.html?who=I&whom=me#here',true,true);
+user=# SELECT * FROM parse_uri('https://myself:password@www.Pivotal.io:80/%7ehello/to/you/index.html?who=I&whom=me#here',true,true);
-[ RECORD 1 ]+---------------------------
scheme | http
userinfo | myself:password
@@ -269,7 +269,7 @@ Example
=======
user=# \x
Expanded display is on.
-user=# SELECT * FROM PDLTOOLS_SCHEMA.parse_uri('http://myself:password@www.Pivotal.io:80/%7ehello/to/you/index.html?who=I&whom=me#here',true,true);
+user=# SELECT * FROM PDLTOOLS_SCHEMA.parse_uri('https://myself:password@www.Pivotal.io:80/%7ehello/to/you/index.html?who=I&whom=me#here',true,true);
-[ RECORD 1 ]+---------------------------
scheme | http
userinfo | myself:password