Skip site navigation (1)Skip section navigation (2)
Date:      Sat, 9 Mar 2019 17:40:54 GMT
From:      pkg-fallout@FreeBSD.org
To:        skreuzer@FreeBSD.org
Cc:        pkg-fallout@FreeBSD.org
Subject:   [package - 120amd64-default][www/py-scrapy] Failed for py36-Scrapy-1.6.0 in run-depends
Message-ID:  <201903091740.x29Hesm3073301@beefy6.nyi.freebsd.org>

next in thread | raw e-mail | index | archive | help
You are receiving this mail as a port that you maintain
is failing to build on the FreeBSD package build server.
Please investigate the failure and submit a PR to fix
the build.

Maintainer:     skreuzer@FreeBSD.org
Last committer: skreuzer@FreeBSD.org
Ident:          $FreeBSD: head/www/py-scrapy/Makefile 495093 2019-03-09 00:55:41Z skreuzer $
Log URL:        http://beefy6.nyi.freebsd.org/data/120amd64-default/495093/logs/py36-Scrapy-1.6.0.log
Build URL:      http://beefy6.nyi.freebsd.org/build.html?mastername=120amd64-default&build=495093
Log:

=>> Building www/py-scrapy
build started at Sat Mar  9 17:40:45 UTC 2019
port directory: /usr/ports/www/py-scrapy
package name: py36-Scrapy-1.6.0
building for: FreeBSD 120amd64-default-job-12 12.0-RELEASE-p3 FreeBSD 12.0-RELEASE-p3 amd64
maintained by: skreuzer@FreeBSD.org
Makefile ident:      $FreeBSD: head/www/py-scrapy/Makefile 495093 2019-03-09 00:55:41Z skreuzer $
Poudriere version: 3.2.8-3-g02cc9753
Host OSVERSION: 1300009
Jail OSVERSION: 1200086
Job Id: 12

---Begin Environment---
SHELL=/bin/csh
OSVERSION=1200086
UNAME_v=FreeBSD 12.0-RELEASE-p3
UNAME_r=12.0-RELEASE-p3
BLOCKSIZE=K
MAIL=/var/mail/root
STATUS=1
HOME=/root
PATH=/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin:/root/bin
LOCALBASE=/usr/local
USER=root
LIBEXECPREFIX=/usr/local/libexec/poudriere
POUDRIERE_VERSION=3.2.8-3-g02cc9753
MASTERMNT=/usr/local/poudriere/data/.m/120amd64-default/ref
POUDRIERE_BUILD_TYPE=bulk
PACKAGE_BUILDING=yes
SAVED_TERM=
PWD=/usr/local/poudriere/data/.m/120amd64-default/ref/.p/pool
P_PORTS_FEATURES=FLAVORS SELECTED_OPTIONS
MASTERNAME=120amd64-default
SCRIPTPREFIX=/usr/local/share/poudriere
OLDPWD=/usr/local/poudriere/data/.m/120amd64-default/ref/.p
SCRIPTPATH=/usr/local/share/poudriere/bulk.sh
POUDRIEREPATH=/usr/local/bin/poudriere
---End Environment---

---Begin Poudriere Port Flags/Env---
PORT_FLAGS=
PKGENV=
FLAVOR=py36
DEPENDS_ARGS=
MAKE_ARGS= FLAVOR=py36
---End Poudriere Port Flags/Env---

---Begin OPTIONS List---
===> The following configuration options are available for py36-Scrapy-1.6.0:
     SSL=on: SSL protocol support
===> Use 'make config' to modify these settings
---End OPTIONS List---

--MAINTAINER--
skreuzer@FreeBSD.org
--End MAINTAINER--

--CONFIGURE_ARGS--

--End CONFIGURE_ARGS--

--CONFIGURE_ENV--
PYTHON="/usr/local/bin/python3.6" XDG_DATA_HOME=/wrkdirs/usr/ports/www/py-scrapy/work-py36  XDG_CONFIG_HOME=/wrkdirs/usr/ports/www/py-scrapy/work-py36  HOME=/wrkdirs/usr/ports/www/py-scrapy/work-py36 TMPDIR="/tmp" PATH=/wrkdirs/usr/ports/www/py-scrapy/work-py36/.bin:/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin:/root/bin SHELL=/bin/sh CONFIG_SHELL=/bin/sh
--End CONFIGURE_ENV--

--MAKE_ENV--
XDG_DATA_HOME=/wrkdirs/usr/ports/www/py-scrapy/work-py36  XDG_CONFIG_HOME=/wrkdirs/usr/ports/www/py-scrapy/work-py36  HOME=/wrkdirs/usr/ports/www/py-scrapy/work-py36 TMPDIR="/tmp" PATH=/wrkdirs/usr/ports/www/py-scrapy/work-py36/.bin:/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin:/root/bin NO_PIE=yes MK_DEBUG_FILES=no MK_KERNEL_SYMBOLS=no SHELL=/bin/sh NO_LINT=YES LDSHARED="cc -shared" PYTHONDONTWRITEBYTECODE= PYTHONOPTIMIZE= PREFIX=/usr/local  LOCALBASE=/usr/local  CC="cc" CFLAGS="-O2 -pipe  -fstack-protector -fno-strict-aliasing "  CPP="cpp" CPPFLAGS=""  LDFLAGS=" -fstack-protector " LIBS=""  CXX="c++" CXXFLAGS="-O2 -pipe -fstack-protector -fno-strict-aliasing  "  MANPREFIX="/usr/local" BSD_INSTALL_PROGRAM="install  -s -m 555"  BSD_INSTALL_LIB="install  -s -m 0644"  BSD_INSTALL_SCRIPT="install  -m 555"  BSD_INSTALL_DATA="install  -m 0644"  BSD_INSTALL_MAN="install  -m 444"
--End MAKE_ENV--

--PLIST_SUB--
PYTHON_INCLUDEDIR=include/python3.6m  PYTHON_LIBDIR=lib/python3.6  PYTHON_PLATFORM=freebsd12  PYTHON_PYOEXTENSION=opt-1.pyc  PYTHON_SITELIBDIR=lib/python3.6/site-packages  PYTHON_SUFFIX=36  PYTHON_VER=3.6  PYTHON_VERSION=python3.6 PYTHON2="@comment " PYTHON3="" OSREL=12.0 PREFIX=%D LOCALBASE=/usr/local  RESETPREFIX=/usr/local LIB32DIR=lib DOCSDIR="share/doc/py36-Scrapy"  EXAMPLESDIR="share/examples/py36-Scrapy"  DATADIR="share/py36-Scrapy"  WWWDIR="www/py36-Scrapy"  ETCDIR="etc/py36-Scrapy"
--End PLIST_SUB--

--SUB_LIST--
PREFIX=/usr/local LOCALBASE=/usr/local  DATADIR=/usr/local/share/py36-Scrapy DOCSDIR=/usr/local/share/doc/py36-Scrapy EXAMPLESDIR=/usr/local/share/examples/py36-Scrapy  WWWDIR=/usr/local/www/py36-Scrapy ETCDIR=/usr/local/etc/py36-Scrapy
--End SUB_LIST--

---Begin make.conf---
USE_PACKAGE_DEPENDS=yes
BATCH=yes
WRKDIRPREFIX=/wrkdirs
PORTSDIR=/usr/ports
PACKAGES=/packages
DISTDIR=/distfiles
PACKAGE_BUILDING=yes
PACKAGE_BUILDING_FLAVORS=yes
#### /usr/local/etc/poudriere.d/make.conf ####
# XXX: We really need this but cannot use it while 'make checksum' does not
# try the next mirror on checksum failure.  It currently retries the same
# failed mirror and then fails rather than trying another.  It *does*
# try the next if the size is mismatched though.
#MASTER_SITE_FREEBSD=yes
# Build ALLOW_MAKE_JOBS_PACKAGES with 2 jobs
MAKE_JOBS_NUMBER=2

# stable/10 includes src.conf too late but make.conf is in sys.mk
.if ${.CURDIR:M/poudriere/jails/10*/usr/src/usr.bin/xlint*}
# Disable build of llib now that head no longer has lint(1)
LINT=	true
.endif
#### /usr/ports/Mk/Scripts/ports_env.sh ####
_CCVERSION_921dbbb2=FreeBSD clang version 6.0.1 (tags/RELEASE_601/final 335540) (based on LLVM 6.0.1) Target: x86_64-unknown-freebsd12.0 Thread model: posix InstalledDir: /usr/bin
_ALTCCVERSION_921dbbb2=none
_CXXINTERNAL_acaad9ca=FreeBSD clang version 6.0.1 (tags/RELEASE_601/final 335540) (based on LLVM 6.0.1) Target: x86_64-unknown-freebsd12.0 Thread model: posix InstalledDir: /usr/bin "/usr/bin/ld" "--eh-frame-hdr" "-dynamic-linker" "/libexec/ld-elf.so.1" "--hash-style=both" "--enable-new-dtags" "-o" "a.out" "/usr/lib/crt1.o" "/usr/lib/crti.o" "/usr/lib/crtbegin.o" "-L/usr/lib" "/dev/null" "-lc++" "-lm" "-lgcc" "--as-needed" "-lgcc_s" "--no-as-needed" "-lc" "-lgcc" "--as-needed" "-lgcc_s" "--no-as-needed" "/usr/lib/crtend.o" "/usr/lib/crtn.o"
CC_OUTPUT_921dbbb2_58173849=yes
CC_OUTPUT_921dbbb2_9bdba57c=yes
CC_OUTPUT_921dbbb2_6a4fe7f5=yes
CC_OUTPUT_921dbbb2_6bcac02b=yes
CC_OUTPUT_921dbbb2_67d20829=yes
CC_OUTPUT_921dbbb2_bfa62e83=yes
CC_OUTPUT_921dbbb2_f0b4d593=yes
CC_OUTPUT_921dbbb2_308abb44=yes
CC_OUTPUT_921dbbb2_f00456e5=yes
CC_OUTPUT_921dbbb2_65ad290d=yes
CC_OUTPUT_921dbbb2_f2776b26=yes
CC_OUTPUT_921dbbb2_b2657cc3=yes
CC_OUTPUT_921dbbb2_380987f7=yes
CC_OUTPUT_921dbbb2_160933ec=yes
CC_OUTPUT_921dbbb2_fb62803b=yes
_OBJC_CCVERSION_921dbbb2=FreeBSD clang version 6.0.1 (tags/RELEASE_601/final 335540) (based on LLVM 6.0.1) Target: x86_64-unknown-freebsd12.0 Thread model: posix InstalledDir: /usr/bin
_OBJC_ALTCCVERSION_921dbbb2=none
ARCH=amd64
OPSYS=FreeBSD
_OSRELEASE=12.0-RELEASE-p3
OSREL=12.0
OSVERSION=1200086
PYTHONBASE=/usr/local
HAVE_COMPAT_IA32_KERN=YES
CONFIGURE_MAX_CMD_LEN=262144
HAVE_PORTS_ENV=1
#### Misc Poudriere ####
GID=0
UID=0
DISABLE_MAKE_JOBS=poudriere
---End make.conf---
--Resource limits--
cpu time               (seconds, -t)  unlimited
file size           (512-blocks, -f)  unlimited
data seg size           (kbytes, -d)  33554432
stack size              (kbytes, -s)  524288
core file size      (512-blocks, -c)  unlimited
max memory size         (kbytes, -m)  unlimited
locked memory           (kbytes, -l)  unlimited
max user processes              (-u)  89999
open files                      (-n)  1024
virtual mem size        (kbytes, -v)  unlimited
swap limit              (kbytes, -w)  unlimited
socket buffer size       (bytes, -b)  unlimited
pseudo-terminals                (-p)  unlimited
kqueues                         (-k)  unlimited
umtx shared locks               (-o)  unlimited
--End resource limits--
=======================<phase: check-sanity   >============================
===>  License BSD3CLAUSE accepted by the user
===========================================================================
=======================<phase: pkg-depends    >============================
===>   py36-Scrapy-1.6.0 depends on file: /usr/local/sbin/pkg - not found
===>   Installing existing package /packages/All/pkg-1.10.5_5.txz
[120amd64-default-job-12] Installing pkg-1.10.5_5...
[120amd64-default-job-12] Extracting pkg-1.10.5_5: .......... done
===>   py36-Scrapy-1.6.0 depends on file: /usr/local/sbin/pkg - found
===>   Returning to build of py36-Scrapy-1.6.0
===========================================================================
=======================<phase: fetch-depends  >============================
===========================================================================
=======================<phase: fetch          >============================
===>  License BSD3CLAUSE accepted by the user
===> Fetching all distfiles required by py36-Scrapy-1.6.0 for building
===========================================================================
=======================<phase: checksum       >============================
===>  License BSD3CLAUSE accepted by the user
===> Fetching all distfiles required by py36-Scrapy-1.6.0 for building
=> SHA256 Checksum OK for Scrapy-1.6.0.tar.gz.
===========================================================================
=======================<phase: extract-depends>============================
===========================================================================
=======================<phase: extract        >============================
===>  License BSD3CLAUSE accepted by the user
===> Fetching all distfiles required by py36-Scrapy-1.6.0 for building
===>  Extracting for py36-Scrapy-1.6.0
=> SHA256 Checksum OK for Scrapy-1.6.0.tar.gz.
===========================================================================
=======================<phase: patch-depends  >============================
===========================================================================
=======================<phase: patch          >============================
===>  Patching for py36-Scrapy-1.6.0
===========================================================================
=======================<phase: build-depends  >============================
===>   py36-Scrapy-1.6.0 depends on package: py36-setuptools>0 - not found
===>   Installing existing package /packages/All/py36-setuptools-40.8.0.txz
[120amd64-default-job-12] Installing py36-setuptools-40.8.0...
[120amd64-default-job-12] `-- Installing python36-3.6.8...
[120amd64-default-job-12] |   `-- Installing gettext-runtime-0.19.8.1_2...
[120amd64-default-job-12] |   | `-- Installing indexinfo-0.3.1...
[120amd64-default-job-12] |   | `-- Extracting indexinfo-0.3.1: .... done
[120amd64-default-job-12] |   `-- Extracting gettext-runtime-0.19.8.1_2: .......... done
[120amd64-default-job-12] |   `-- Installing libffi-3.2.1_3...
[120amd64-default-job-12] |   `-- Extracting libffi-3.2.1_3: .......... done
[120amd64-default-job-12] |   `-- Installing readline-7.0.5...
<snip>
copying scrapy/utils/ftp.py -> build/lib/scrapy/utils
copying scrapy/utils/http.py -> build/lib/scrapy/utils
copying scrapy/utils/multipart.py -> build/lib/scrapy/utils
copying scrapy/utils/gz.py -> build/lib/scrapy/utils
copying scrapy/utils/signal.py -> build/lib/scrapy/utils
copying scrapy/utils/benchserver.py -> build/lib/scrapy/utils
copying scrapy/utils/trackref.py -> build/lib/scrapy/utils
copying scrapy/utils/test.py -> build/lib/scrapy/utils
copying scrapy/utils/misc.py -> build/lib/scrapy/utils
copying scrapy/utils/spider.py -> build/lib/scrapy/utils
copying scrapy/utils/request.py -> build/lib/scrapy/utils
copying scrapy/utils/url.py -> build/lib/scrapy/utils
copying scrapy/utils/boto.py -> build/lib/scrapy/utils
copying scrapy/utils/datatypes.py -> build/lib/scrapy/utils
copying scrapy/utils/ossignal.py -> build/lib/scrapy/utils
copying scrapy/utils/template.py -> build/lib/scrapy/utils
copying scrapy/utils/job.py -> build/lib/scrapy/utils
copying scrapy/utils/deprecate.py -> build/lib/scrapy/utils
copying scrapy/utils/conf.py -> build/lib/scrapy/utils
creating build/lib/scrapy/contracts
copying scrapy/contracts/default.py -> build/lib/scrapy/contracts
copying scrapy/contracts/__init__.py -> build/lib/scrapy/contracts
creating build/lib/scrapy/xlib
copying scrapy/xlib/__init__.py -> build/lib/scrapy/xlib
copying scrapy/xlib/tx.py -> build/lib/scrapy/xlib
copying scrapy/xlib/pydispatch.py -> build/lib/scrapy/xlib
creating build/lib/scrapy/linkextractors
copying scrapy/linkextractors/htmlparser.py -> build/lib/scrapy/linkextractors
copying scrapy/linkextractors/__init__.py -> build/lib/scrapy/linkextractors
copying scrapy/linkextractors/sgml.py -> build/lib/scrapy/linkextractors
copying scrapy/linkextractors/lxmlhtml.py -> build/lib/scrapy/linkextractors
copying scrapy/linkextractors/regex.py -> build/lib/scrapy/linkextractors
creating build/lib/scrapy/pipelines
copying scrapy/pipelines/files.py -> build/lib/scrapy/pipelines
copying scrapy/pipelines/__init__.py -> build/lib/scrapy/pipelines
copying scrapy/pipelines/media.py -> build/lib/scrapy/pipelines
copying scrapy/pipelines/images.py -> build/lib/scrapy/pipelines
creating build/lib/scrapy/selector
copying scrapy/selector/__init__.py -> build/lib/scrapy/selector
copying scrapy/selector/unified.py -> build/lib/scrapy/selector
copying scrapy/selector/lxmlsel.py -> build/lib/scrapy/selector
copying scrapy/selector/csstranslator.py -> build/lib/scrapy/selector
creating build/lib/scrapy/core/downloader
copying scrapy/core/downloader/__init__.py -> build/lib/scrapy/core/downloader
copying scrapy/core/downloader/webclient.py -> build/lib/scrapy/core/downloader
copying scrapy/core/downloader/contextfactory.py -> build/lib/scrapy/core/downloader
copying scrapy/core/downloader/middleware.py -> build/lib/scrapy/core/downloader
copying scrapy/core/downloader/tls.py -> build/lib/scrapy/core/downloader
creating build/lib/scrapy/core/downloader/handlers
copying scrapy/core/downloader/handlers/__init__.py -> build/lib/scrapy/core/downloader/handlers
copying scrapy/core/downloader/handlers/http11.py -> build/lib/scrapy/core/downloader/handlers
copying scrapy/core/downloader/handlers/ftp.py -> build/lib/scrapy/core/downloader/handlers
copying scrapy/core/downloader/handlers/http.py -> build/lib/scrapy/core/downloader/handlers
copying scrapy/core/downloader/handlers/datauri.py -> build/lib/scrapy/core/downloader/handlers
copying scrapy/core/downloader/handlers/s3.py -> build/lib/scrapy/core/downloader/handlers
copying scrapy/core/downloader/handlers/http10.py -> build/lib/scrapy/core/downloader/handlers
copying scrapy/core/downloader/handlers/file.py -> build/lib/scrapy/core/downloader/handlers
creating build/lib/scrapy/http/request
copying scrapy/http/request/rpc.py -> build/lib/scrapy/http/request
copying scrapy/http/request/__init__.py -> build/lib/scrapy/http/request
copying scrapy/http/request/form.py -> build/lib/scrapy/http/request
creating build/lib/scrapy/http/response
copying scrapy/http/response/xml.py -> build/lib/scrapy/http/response
copying scrapy/http/response/__init__.py -> build/lib/scrapy/http/response
copying scrapy/http/response/html.py -> build/lib/scrapy/http/response
copying scrapy/http/response/text.py -> build/lib/scrapy/http/response
running egg_info
writing Scrapy.egg-info/PKG-INFO
writing dependency_links to Scrapy.egg-info/dependency_links.txt
writing entry points to Scrapy.egg-info/entry_points.txt
writing requirements to Scrapy.egg-info/requires.txt
writing top-level names to Scrapy.egg-info/top_level.txt
reading manifest file 'Scrapy.egg-info/SOURCES.txt'
reading manifest template 'MANIFEST.in'
warning: no files found matching 'license.txt' under directory 'scrapy'
no previously-included directories found matching 'docs/build'
warning: no files found matching '*' under directory 'bin'
warning: no previously-included files matching '__pycache__' found anywhere in distribution
warning: no previously-included files matching '*.py[cod]' found anywhere in distribution
writing manifest file 'Scrapy.egg-info/SOURCES.txt'
copying scrapy/VERSION -> build/lib/scrapy
copying scrapy/mime.types -> build/lib/scrapy
creating build/lib/scrapy/templates
creating build/lib/scrapy/templates/project
copying scrapy/templates/project/scrapy.cfg -> build/lib/scrapy/templates/project
creating build/lib/scrapy/templates/project/module
copying scrapy/templates/project/module/__init__.py -> build/lib/scrapy/templates/project/module
copying scrapy/templates/project/module/items.py.tmpl -> build/lib/scrapy/templates/project/module
copying scrapy/templates/project/module/middlewares.py.tmpl -> build/lib/scrapy/templates/project/module
copying scrapy/templates/project/module/pipelines.py.tmpl -> build/lib/scrapy/templates/project/module
copying scrapy/templates/project/module/settings.py.tmpl -> build/lib/scrapy/templates/project/module
creating build/lib/scrapy/templates/project/module/spiders
copying scrapy/templates/project/module/spiders/__init__.py -> build/lib/scrapy/templates/project/module/spiders
creating build/lib/scrapy/templates/spiders
copying scrapy/templates/spiders/basic.tmpl -> build/lib/scrapy/templates/spiders
copying scrapy/templates/spiders/crawl.tmpl -> build/lib/scrapy/templates/spiders
copying scrapy/templates/spiders/csvfeed.tmpl -> build/lib/scrapy/templates/spiders
copying scrapy/templates/spiders/xmlfeed.tmpl -> build/lib/scrapy/templates/spiders
===========================================================================
=======================<phase: run-depends    >============================
===>   py36-Scrapy-1.6.0 depends on package: py36-twisted>=13.1.0 - not found
===>   Installing existing package /packages/All/py36-twisted-17.9.0.txz
[120amd64-default-job-12] Installing py36-twisted-17.9.0...
[120amd64-default-job-12] `-- Installing py36-Automat-0.7.0...
[120amd64-default-job-12] |   `-- Installing py36-attrs-18.2.0...
[120amd64-default-job-12] |   `-- Extracting py36-attrs-18.2.0: .......... done
[120amd64-default-job-12] |   `-- Installing py36-six-1.12.0...
[120amd64-default-job-12] |   `-- Extracting py36-six-1.12.0: .......... done
[120amd64-default-job-12] `-- Extracting py36-Automat-0.7.0: .......... done
[120amd64-default-job-12] `-- Installing py36-constantly-15.1.0...
[120amd64-default-job-12] `-- Extracting py36-constantly-15.1.0: .......... done
[120amd64-default-job-12] `-- Installing py36-hyperlink-18.0.0...
[120amd64-default-job-12] |   `-- Installing py36-idna-2.7...
[120amd64-default-job-12] |   `-- Extracting py36-idna-2.7: .......... done
[120amd64-default-job-12] `-- Extracting py36-hyperlink-18.0.0: .......... done
[120amd64-default-job-12] `-- Installing py36-incremental-17.5.0...
[120amd64-default-job-12] `-- Extracting py36-incremental-17.5.0: .......... done
[120amd64-default-job-12] `-- Installing py36-zope.interface-4.6.0...
[120amd64-default-job-12] `-- Extracting py36-zope.interface-4.6.0: .......... done
[120amd64-default-job-12] Extracting py36-twisted-17.9.0: .......... done
===>   py36-Scrapy-1.6.0 depends on package: py36-twisted>=13.1.0 - found
===>   Returning to build of py36-Scrapy-1.6.0
===>   py36-Scrapy-1.6.0 depends on package: py36-lxml>0 - not found
===>   Installing existing package /packages/All/py36-lxml-4.3.1.txz
[120amd64-default-job-12] Installing py36-lxml-4.3.1...
[120amd64-default-job-12] `-- Installing libxml2-2.9.8...
[120amd64-default-job-12] `-- Extracting libxml2-2.9.8: .......... done
[120amd64-default-job-12] `-- Installing libxslt-1.1.32...
[120amd64-default-job-12] |   `-- Installing libgcrypt-1.8.4_1...
[120amd64-default-job-12] |   | `-- Installing libgpg-error-1.35...
[120amd64-default-job-12] |   | `-- Extracting libgpg-error-1.35: .......... done
[120amd64-default-job-12] |   `-- Extracting libgcrypt-1.8.4_1: .......... done
[120amd64-default-job-12] `-- Extracting libxslt-1.1.32: .......... done
[120amd64-default-job-12] Extracting py36-lxml-4.3.1: .......... done
===>   py36-Scrapy-1.6.0 depends on package: py36-lxml>0 - found
===>   Returning to build of py36-Scrapy-1.6.0
===>   py36-Scrapy-1.6.0 depends on package: py36-sqlite3>0 - not found
===>   Installing existing package /packages/All/py36-sqlite3-3.6.8_7.txz
[120amd64-default-job-12] Installing py36-sqlite3-3.6.8_7...
[120amd64-default-job-12] `-- Installing sqlite3-3.27.1...
[120amd64-default-job-12] `-- Extracting sqlite3-3.27.1: .......... done
[120amd64-default-job-12] Extracting py36-sqlite3-3.6.8_7: ........ done
===>   py36-Scrapy-1.6.0 depends on package: py36-sqlite3>0 - found
===>   Returning to build of py36-Scrapy-1.6.0
===>   py36-Scrapy-1.6.0 depends on package: py36-w3lib>=1.17.0 - not found
===>   Installing existing package /packages/All/py36-w3lib-1.20.0.txz
[120amd64-default-job-12] Installing py36-w3lib-1.20.0...
[120amd64-default-job-12] Extracting py36-w3lib-1.20.0: .......... done
===>   py36-Scrapy-1.6.0 depends on package: py36-w3lib>=1.17.0 - found
===>   Returning to build of py36-Scrapy-1.6.0
===>   py36-Scrapy-1.6.0 depends on package: py36-cssselect>=0.9 - not found
===>   Installing existing package /packages/All/py36-cssselect-0.9.1_2.txz
[120amd64-default-job-12] Installing py36-cssselect-0.9.1_2...
[120amd64-default-job-12] Extracting py36-cssselect-0.9.1_2: .......... done
===>   py36-Scrapy-1.6.0 depends on package: py36-cssselect>=0.9 - found
===>   Returning to build of py36-Scrapy-1.6.0
===>   py36-Scrapy-1.6.0 depends on package: py36-queuelib>0 - not found
===>   Installing existing package /packages/All/py36-queuelib-1.5.0.txz
[120amd64-default-job-12] Installing py36-queuelib-1.5.0...
[120amd64-default-job-12] Extracting py36-queuelib-1.5.0: .......... done
===>   py36-Scrapy-1.6.0 depends on package: py36-queuelib>0 - found
===>   Returning to build of py36-Scrapy-1.6.0
===>   py36-Scrapy-1.6.0 depends on package: py36-pydispatcher>=2.0.5 - not found
===>   Installing existing package /packages/All/py36-pydispatcher-2.0.5.txz
[120amd64-default-job-12] Installing py36-pydispatcher-2.0.5...
[120amd64-default-job-12] Extracting py36-pydispatcher-2.0.5: .......... done
===>   py36-Scrapy-1.6.0 depends on package: py36-pydispatcher>=2.0.5 - found
===>   Returning to build of py36-Scrapy-1.6.0
===>   py36-Scrapy-1.6.0 depends on package: py36-service_identity>0 - not found
===>   Installing existing package /packages/All/py36-service_identity-17.0.0.txz
[120amd64-default-job-12] Installing py36-service_identity-17.0.0...
[120amd64-default-job-12] `-- Installing py36-characteristic-14.3.0...
[120amd64-default-job-12] `-- Extracting py36-characteristic-14.3.0: .......... done
[120amd64-default-job-12] `-- Installing py36-openssl-18.0.0...
[120amd64-default-job-12] |   `-- Installing py36-cryptography-2.3...
[120amd64-default-job-12] |   | `-- Installing py36-asn1crypto-0.22.0...
[120amd64-default-job-12] |   | `-- Extracting py36-asn1crypto-0.22.0: .......... done
[120amd64-default-job-12] |   | `-- Installing py36-cffi-1.12.2...
[120amd64-default-job-12] |   |   `-- Installing py36-pycparser-2.18...
[120amd64-default-job-12] |   |   `-- Extracting py36-pycparser-2.18: .......... done
[120amd64-default-job-12] |   | `-- Extracting py36-cffi-1.12.2: .......... done
[120amd64-default-job-12] |   `-- Extracting py36-cryptography-2.3: .......... done
[120amd64-default-job-12] `-- Extracting py36-openssl-18.0.0: .......... done
[120amd64-default-job-12] `-- Installing py36-pyasn1-0.4.5...
[120amd64-default-job-12] `-- Extracting py36-pyasn1-0.4.5: .......... done
[120amd64-default-job-12] `-- Installing py36-pyasn1-modules-0.2.3...
[120amd64-default-job-12] `-- Extracting py36-pyasn1-modules-0.2.3: .......... done
[120amd64-default-job-12] Extracting py36-service_identity-17.0.0: .......... done
===>   py36-Scrapy-1.6.0 depends on package: py36-service_identity>0 - found
===>   Returning to build of py36-Scrapy-1.6.0
===>   py36-Scrapy-1.6.0 depends on package: py36-six>=1.5.2 - found
===>   py36-Scrapy-1.6.0 depends on package: py36-parsel>=1.5 - not found
===>   Installing existing package /packages/All/py36-parsel-1.2.0.txz
[120amd64-default-job-12] Installing py36-parsel-1.2.0...
[120amd64-default-job-12] Extracting py36-parsel-1.2.0: .......... done
===>   py36-Scrapy-1.6.0 depends on package: py36-parsel>=1.5 - not found
*** Error code 1

Stop.
make: stopped in /usr/ports/www/py-scrapy



Want to link to this message? Use this URL: <https://mail-archive.FreeBSD.org/cgi/mid.cgi?201903091740.x29Hesm3073301>