Skip site navigation (1)Skip section navigation (2)
Date:      Wed, 23 Aug 2000 15:27:45 -0500 (CDT)
From:      missnglnk@sneakerz.org
To:        FreeBSD-gnats-submit@freebsd.org
Subject:   ports/20809: New Port: wget-ssl-1.5.3 
Message-ID:  <20000823202745.34E685D006@sneakerz.org>

next in thread | raw e-mail | index | archive | help

>Number:         20809
>Category:       ports
>Synopsis:       New Port: wget-ssl-1.5.3
>Confidential:   no
>Severity:       non-critical
>Priority:       low
>Responsible:    freebsd-ports
>State:          open
>Quarter:        
>Keywords:       
>Date-Required:
>Class:          change-request
>Submitter-Id:   current-users
>Arrival-Date:   Wed Aug 23 13:30:01 PDT 2000
>Closed-Date:
>Last-Modified:
>Originator:     missnglnk
>Release:        FreeBSD 4.1-FEARSOME-20000818 i386
>Organization:
>Environment:

	Any FreeBSD system with ports collection.

>Description:

 	Wget-ssl is the original wget utility combined with an SSL patch
	(by J.Sobieszek@elka.pw.edu.pl) that adds support for HTTPS URLs.

>How-To-Repeat:
>Fix:

# This is a shell archive.  Save it in a file, remove anything before
# this line, and then unpack it by entering "sh file".  Note, it may
# create directories; files and directories will be owned by you and
# have default permissions.
#
# This archive contains:
#
#	wget-ssl/
#	wget-ssl/files
#	wget-ssl/files/md5
#	wget-ssl/pkg
#	wget-ssl/pkg/COMMENT
#	wget-ssl/pkg/DESCR
#	wget-ssl/pkg/PLIST
#	wget-ssl/Makefile
#	wget-ssl/patches
#	wget-ssl/patches/patch-ab
#	wget-ssl/patches/patch-main.c
#	wget-ssl/patches/patch-retr.c
#
echo c - wget-ssl/
mkdir -p wget-ssl/ > /dev/null 2>&1
echo c - wget-ssl/files
mkdir -p wget-ssl/files > /dev/null 2>&1
echo x - wget-ssl/files/md5
sed 's/^X//' >wget-ssl/files/md5 << 'END-of-wget-ssl/files/md5'
XMD5 (wget-1.5.3.tar.gz) = 47680b25bf893afdb0c43b24e3fc2fd6
XMD5 (wget-1.5.3-ssl.patch.gz) = f2b2fe0d7cae4b7d283a813134282a2d
END-of-wget-ssl/files/md5
echo c - wget-ssl/pkg
mkdir -p wget-ssl/pkg > /dev/null 2>&1
echo x - wget-ssl/pkg/COMMENT
sed 's/^X//' >wget-ssl/pkg/COMMENT << 'END-of-wget-ssl/pkg/COMMENT'
XRetrieve files from the 'net via HTTP, HTTPS, and FTP
END-of-wget-ssl/pkg/COMMENT
echo x - wget-ssl/pkg/DESCR
sed 's/^X//' >wget-ssl/pkg/DESCR << 'END-of-wget-ssl/pkg/DESCR'
XWget [formerly known as Geturl] is a freely available network utility
Xto retrieve files from the World Wide Web using HTTP and FTP, the two
Xmost widely used Internet protocols.  It works non-interactively, thus
Xenabling work in the background, after having logged off.
X
XThe recursive retrieval of HTML pages, as well as FTP sites is
Xsupported -- you can use Wget to make mirrors of archives and home
Xpages, or traverse the web like a WWW robot (Wget understands
X/robots.txt).
END-of-wget-ssl/pkg/DESCR
echo x - wget-ssl/pkg/PLIST
sed 's/^X//' >wget-ssl/pkg/PLIST << 'END-of-wget-ssl/pkg/PLIST'
Xbin/wget
Xetc/wgetrc
X@unexec install-info --delete %D/info/wget.info %D/info/dir
Xinfo/wget.info
Xinfo/wget.info-1
Xinfo/wget.info-2
Xinfo/wget.info-3
Xshare/locale/cs/LC_MESSAGES/wget.mo
Xshare/locale/de/LC_MESSAGES/wget.mo
Xshare/locale/hr/LC_MESSAGES/wget.mo
Xshare/locale/no/LC_MESSAGES/wget.mo
Xshare/locale/it/LC_MESSAGES/wget.mo
Xshare/locale/pt_BR/LC_MESSAGES/wget.mo
X@exec install-info %D/info/wget.info %D/info/dir
END-of-wget-ssl/pkg/PLIST
echo x - wget-ssl/Makefile
sed 's/^X//' >wget-ssl/Makefile << 'END-of-wget-ssl/Makefile'
X# New ports collection makefile for:   wget-ssl
X# Version required:            1.5.3
X# Date Created:                23 August 2000
X# Whom:                        missnglnk <missnglnk@sneakerz.org>
X#
X# $FreeBSD$
X#
X
XDISTNAME=	wget-1.5.3
XPKGNAME=	wget-ssl-1.5.3
XCATEGORIES=	ftp www
XMASTER_SITES=	${MASTER_SITE_GNU} \
X		http://www.sneakerz.org/~missnglnk/freebsd/ports/ \
X		ftp://ftp.dl.ac.uk/ccp14/ftp-mirror/wget/pub/unix/util/wget/ \
X		ftp://ftp.informatik.hu-berlin.de/gnu/wget/ \
X		ftp://ftp.uni-mainz.de/pub/gnu/wget/ \
X		ftp://ftp.max.irk.ru/unix/net/www/wget/
XMASTER_SITE_SUBDIR=	wget
X
XMAINTAINER=	missnglnk@sneakerz.org
XUSE_OPENSSL=	RSA
X
XLIB_DEPENDS=	intl.1:${PORTSDIR}/devel/gettext
X
XUSE_AUTOCONF=	yes
XCONFIGURE_ENV=	CPPFLAGS="-I${PREFIX}/include" \
X		LDFLAGS="-L${PREFIX}/lib"
XCONFIGURE_ARGS=	--with-OpenSSL \
X		--with-ssl-incdir="${OPENSSLINC}/openssl" \
X		--with-ssl-libdir="${OPENSSLLIB}"
X
XINFOFILES=	wget.info
X
XPATCH_SITES=	http://www.sneakerz.org/~missnglnk/freebsd/ports/
XPATCHFILES=	wget-1.5.3-ssl.patch.gz
XPATCH_DIST_STRIP=	-p1
X
Xpost-install:
X.for i in ${INFOFILES}
X	install-info ${PREFIX}/info/${i} ${PREFIX}/info/dir
X.endfor
X
X.include <bsd.port.mk>
END-of-wget-ssl/Makefile
echo c - wget-ssl/patches
mkdir -p wget-ssl/patches > /dev/null 2>&1
echo x - wget-ssl/patches/patch-ab
sed 's/^X//' >wget-ssl/patches/patch-ab << 'END-of-wget-ssl/patches/patch-ab'
X--- src/url.c.orig	Thu Sep 10 17:23:26 1998
X+++ src/url.c	Fri Feb 11 23:41:42 2000
X@@ -56,9 +56,9 @@
X    encoding, and \033 for safe printing.  */
X 
X #ifndef WINDOWS
X-# define URL_UNSAFE " <>\"#%{}|\\^~[]`@:\033"
X+# define URL_UNSAFE "& <>\"#%{}|\\^~[]`@:\033"
X #else  /* WINDOWS */
X-# define URL_UNSAFE " <>\"%{}|\\^[]`\033"
X+# define URL_UNSAFE "& <>\"%{}|\\^[]`\033"
X #endif /* WINDOWS */
X 
X /* If S contains unsafe characters, free it and replace it with a
X@@ -143,6 +143,7 @@
X static uerr_t parse_uname PARAMS ((const char *, char **, char **));
X static char *construct PARAMS ((const char *, const char *, int , int));
X static char *construct_relative PARAMS ((const char *, const char *));
X+static char *construct_escape PARAMS ((const char *path));
X static char process_ftp_type PARAMS ((char *));
X 
X 
X@@ -626,7 +627,7 @@
X str_url (const struct urlinfo *u, int hide)
X {
X   char *res, *host, *user, *passwd, *proto_name, *dir, *file;
X-  int i, l, ln, lu, lh, lp, lf, ld;
X+  int i, l, ln, lu, lh, lp, lf, ld, offset;
X 
X   /* Look for the protocol name.  */
X   for (i = 0; i < ARRAY_SIZE (sup_protos); i++)
X@@ -637,7 +638,27 @@
X   proto_name = sup_protos[i].name;
X   host = CLEANDUP (u->host);
X   dir = CLEANDUP (u->dir);
X+  l = strlen(dir);
X+  offset = 0;
X+  for(i = 0, offset = 0; i < l ; i++, offset++) {
X+    dir[offset] = dir[i];
X+    if(strncasecmp(dir + i, "%26amp;", 7) == 0) {
X+      i += 6;
X+      offset += 2;
X+    }
X+  }
X+  dir[offset] = 0;
X   file = CLEANDUP (u->file);
X+  l = strlen(file);
X+  offset = 0;
X+  for(i = 0, offset = 0; i < l ; i++, offset++) {
X+    file[offset] = file[i];
X+    if(strncasecmp(file + i, "%26amp;", 7) == 0) {
X+      i += 6;
X+      offset += 2;
X+    }
X+  }
X+  file[offset] = 0;
X   user = passwd = NULL;
X   if (u->user)
X     user = CLEANDUP (u->user);
X@@ -1314,6 +1335,24 @@
X 	  strncat (constr + i, sub, subsize);
X 	  constr[i + subsize] = '\0';
X 	} /* *sub == `/' */
X+        {
X+                int len, current;
X+                len = strlen(constr);
X+                current = 0;
X+                for(i = 0 ; i < len ; i++, current++) {
X+                        if(strncmp(constr + i, "/../", 4) == 0) {
X+                                i += 4;
X+				for(current--; current > 0 ; current--) {
X+					if(constr[current] == '/') {
X+						current++;
X+						break;
X+					}
X+				}
X+                        }
X+                        constr[current] = constr[i];
X+                }
X+                constr[current] = 0;
X+        }
X     }
X   else /* !no_proto */
X     {
X@@ -1369,6 +1408,7 @@
X   FILE *fp;
X   char *buf, *p, *p2;
X   long size;
X+  int i;
X 
X   logprintf (LOG_VERBOSE, _("Converting %s... "), file);
X   /* Read from the file....  */
X@@ -1401,23 +1441,46 @@
X       /* If the URL already is relative or it is not to be converted
X 	 for some other reason (e.g. because of not having been
X 	 downloaded in the first place), skip it.  */
X-      if ((l->flags & URELATIVE) || !(l->flags & UABS2REL))
X-	{
X-	  DEBUGP (("Skipping %s at position %d (flags %d).\n", l->url,
X-		   l->pos, l->flags));
X-	  continue;
X-	}
X+      if((l->flags & UABS2REL) == 0) {
X+	DEBUGP (("Skipping %s at position %d (flags %d).\n", l->url,
X+	   l->pos, l->flags));
X+
X+	continue;
X+      }
X       /* Else, reach the position of the offending URL, echoing
X 	 everything up to it to the outfile.  */
X       for (p2 = buf + l->pos; p < p2; p++)
X 	putc (*p, fp);
X-      if (l->flags & UABS2REL)
X-	{
X-	  char *newname = construct_relative (file, l->local_name);
X-	  fprintf (fp, "%s", newname);
X+      if(l->local_name != NULL) {
X+	  char *newname;
X+	  char *collect;
X+
X+	  newname = construct_relative (file, l->local_name);
X+	  collect = construct_escape(newname);
X+	  fprintf (fp, "%s", collect);
X 	  DEBUGP (("ABS2REL: %s to %s at position %d in %s.\n",
X 		   l->url, newname, l->pos, file));
X 	  free (newname);
X+	  free (collect);
X+	} else {
X+	  struct urlinfo *url = newurl();
X+	  char *collect;
X+	  char *newname;
X+
X+	  parseurl(l->url, url, 0);
X+	  l->local_name = url_filename(url);
X+	  newname = construct_escape(file);
X+	  collect = construct_relative (newname, l->local_name);
X+	  free(l->local_name);
X+	  l->local_name = NULL;
X+
X+	  fprintf (fp, "%s", collect);
X+	  DEBUGP (("ABS2REL: %s to %s at position %d in %s.\n",
X+		   l->url, collect, l->pos, file));
X+	  free (collect);
X+	  free (newname);
X+
X+	  freeurl(url, 1);
X 	}
X       p += l->size;
X     }
X@@ -1429,6 +1492,32 @@
X   fclose (fp);
X   free (buf);
X   logputs (LOG_VERBOSE, _("done.\n"));
X+}
X+
X+/*
X+ */
X+static char * construct_escape(const char *path)
X+{
X+	unsigned int	length = strlen(path);
X+	unsigned int	i, offset;
X+	char *		string = NULL;
X+
X+	for(i = 0 ; path[i] != 0 ; i++) {
X+		if(path[i] == '%') {
X+			length += 2;
X+		}
X+	}
X+	string = xmalloc(length + 1);
X+	for(i = 0, offset = 0 ; path[i] != 0 ; i++, offset++) {
X+		string[offset] = path[i];
X+		if(path[i] == '%') {
X+			string[offset + 1] = '2';
X+			string[offset + 2] = '5';
X+			offset += 2;
X+		}
X+	}
X+	string[offset] = 0;
X+	return string;
X }
X 
X /* Construct and return a malloced copy of the relative link from two
END-of-wget-ssl/patches/patch-ab
echo x - wget-ssl/patches/patch-main.c
sed 's/^X//' >wget-ssl/patches/patch-main.c << 'END-of-wget-ssl/patches/patch-main.c'
X--- src/main.c.orig	Wed Feb  9 00:12:01 2000
X+++ src/main.c	Wed Feb  9 00:17:06 2000
X@@ -655,10 +655,11 @@
X   /* Retrieve the URLs from argument list.  */
X   for (t = url; *t; t++)
X     {
X-      char *filename, *new_file;
X+      char *filename, *new_file, *referer;
X       int dt;
X 
X-      status = retrieve_url (*t, &filename, &new_file, NULL, &dt);
X+      referer = getenv("referer");
X+      status = retrieve_url (*t, &filename, &new_file, referer, &dt);
X       if (opt.recursive && status == RETROK && (dt & TEXTHTML))
X 	status = recursive_retrieve (filename, new_file ? new_file : *t);
X       FREE_MAYBE (new_file);
X
END-of-wget-ssl/patches/patch-main.c
echo x - wget-ssl/patches/patch-retr.c
sed 's/^X//' >wget-ssl/patches/patch-retr.c << 'END-of-wget-ssl/patches/patch-retr.c'
X--- src/retr.c.orig	Wed Feb  9 00:18:45 2000
X+++ src/retr.c	Wed Feb  9 00:18:51 2000
X@@ -445,7 +445,7 @@
X   recursive_reset ();
X   for (cur_url = url_list; cur_url; cur_url = cur_url->next, ++*count)
X     {
X-      char *filename, *new_file;
X+      char *filename, *new_file, *referer;
X       int dt;
X 
X       if (opt.quota && opt.downloaded > opt.quota)
X@@ -453,7 +453,8 @@
X 	  status = QUOTEXC;
X 	  break;
X 	}
X-      status = retrieve_url (cur_url->url, &filename, &new_file, NULL, &dt);
X+      referer = getenv("referer");
X+      status = retrieve_url (cur_url->url, &filename, &new_file, referer, &dt);
X       if (opt.recursive && status == RETROK && (dt & TEXTHTML))
X 	status = recursive_retrieve (filename, new_file ? new_file : cur_url->url);
X 
END-of-wget-ssl/patches/patch-retr.c
exit


>Release-Note:
>Audit-Trail:
>Unformatted:


To Unsubscribe: send mail to majordomo@FreeBSD.org
with "unsubscribe freebsd-ports" in the body of the message




Want to link to this message? Use this URL: <https://mail-archive.FreeBSD.org/cgi/mid.cgi?20000823202745.34E685D006>