当前位置: 首页 > 编程日记 > 正文

聊聊flink的HistoryServer

为什么80%的码农都做不了架构师?>>>   hot3.png

本文主要研究一下flink的HistoryServer

HistoryServer

flink-1.7.2/flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/history/HistoryServer.java

public class HistoryServer {private static final Logger LOG = LoggerFactory.getLogger(HistoryServer.class);private final Configuration config;private final String webAddress;private final int webPort;private final long webRefreshIntervalMillis;private final File webDir;private final HistoryServerArchiveFetcher archiveFetcher;@Nullableprivate final SSLHandlerFactory serverSSLFactory;private WebFrontendBootstrap netty;private final Object startupShutdownLock = new Object();private final AtomicBoolean shutdownRequested = new AtomicBoolean(false);private final Thread shutdownHook;public static void main(String[] args) throws Exception {ParameterTool pt = ParameterTool.fromArgs(args);String configDir = pt.getRequired("configDir");LOG.info("Loading configuration from {}", configDir);final Configuration flinkConfig = GlobalConfiguration.loadConfiguration(configDir);try {FileSystem.initialize(flinkConfig);} catch (IOException e) {throw new Exception("Error while setting the default filesystem scheme from configuration.", e);}// run the history serverSecurityUtils.install(new SecurityConfiguration(flinkConfig));try {SecurityUtils.getInstalledContext().runSecured(new Callable<Integer>() {@Overridepublic Integer call() throws Exception {HistoryServer hs = new HistoryServer(flinkConfig);hs.run();return 0;}});System.exit(0);} catch (Throwable t) {final Throwable strippedThrowable = ExceptionUtils.stripException(t, UndeclaredThrowableException.class);LOG.error("Failed to run HistoryServer.", strippedThrowable);strippedThrowable.printStackTrace();System.exit(1);}}public HistoryServer(Configuration config) throws IOException, FlinkException {this(config, new CountDownLatch(0));}public HistoryServer(Configuration config, CountDownLatch numFinishedPolls) throws IOException, FlinkException {Preconditions.checkNotNull(config);Preconditions.checkNotNull(numFinishedPolls);this.config = config;if (config.getBoolean(HistoryServerOptions.HISTORY_SERVER_WEB_SSL_ENABLED) && 
SSLUtils.isRestSSLEnabled(config)) {LOG.info("Enabling SSL for the history server.");try {this.serverSSLFactory = SSLUtils.createRestServerSSLEngineFactory(config);} catch (Exception e) {throw new IOException("Failed to initialize SSLContext for the history server.", e);}} else {this.serverSSLFactory = null;}webAddress = config.getString(HistoryServerOptions.HISTORY_SERVER_WEB_ADDRESS);webPort = config.getInteger(HistoryServerOptions.HISTORY_SERVER_WEB_PORT);webRefreshIntervalMillis = config.getLong(HistoryServerOptions.HISTORY_SERVER_WEB_REFRESH_INTERVAL);String webDirectory = config.getString(HistoryServerOptions.HISTORY_SERVER_WEB_DIR);if (webDirectory == null) {webDirectory = System.getProperty("java.io.tmpdir") + File.separator + "flink-web-history-" + UUID.randomUUID();}webDir = new File(webDirectory);String refreshDirectories = config.getString(HistoryServerOptions.HISTORY_SERVER_ARCHIVE_DIRS);if (refreshDirectories == null) {throw new FlinkException(HistoryServerOptions.HISTORY_SERVER_ARCHIVE_DIRS + " was not configured.");}List<RefreshLocation> refreshDirs = new ArrayList<>();for (String refreshDirectory : refreshDirectories.split(",")) {try {Path refreshPath = WebMonitorUtils.validateAndNormalizeUri(new Path(refreshDirectory).toUri());FileSystem refreshFS = refreshPath.getFileSystem();refreshDirs.add(new RefreshLocation(refreshPath, refreshFS));} catch (Exception e) {// there's most likely something wrong with the path itself, so we ignore it from here onLOG.warn("Failed to create Path or FileSystem for directory '{}'. 
Directory will not be monitored.", refreshDirectory, e);}}if (refreshDirs.isEmpty()) {throw new FlinkException("Failed to validate any of the configured directories to monitor.");}long refreshIntervalMillis = config.getLong(HistoryServerOptions.HISTORY_SERVER_ARCHIVE_REFRESH_INTERVAL);archiveFetcher = new HistoryServerArchiveFetcher(refreshIntervalMillis, refreshDirs, webDir, numFinishedPolls);this.shutdownHook = ShutdownHookUtil.addShutdownHook(HistoryServer.this::stop,HistoryServer.class.getSimpleName(),LOG);}@VisibleForTestingint getWebPort() {return netty.getServerPort();}public void run() {try {start();new CountDownLatch(1).await();} catch (Exception e) {LOG.error("Failure while running HistoryServer.", e);} finally {stop();}}// ------------------------------------------------------------------------// Life-cycle// ------------------------------------------------------------------------void start() throws IOException, InterruptedException {synchronized (startupShutdownLock) {LOG.info("Starting history server.");Files.createDirectories(webDir.toPath());LOG.info("Using directory {} as local cache.", webDir);Router router = new Router();router.addGet("/:*", new HistoryServerStaticFileServerHandler(webDir));if (!webDir.exists() && !webDir.mkdirs()) {throw new IOException("Failed to create local directory " + webDir.getAbsoluteFile() + ".");}createDashboardConfigFile();archiveFetcher.start();netty = new WebFrontendBootstrap(router, LOG, webDir, serverSSLFactory, webAddress, webPort, config);}}void stop() {if (shutdownRequested.compareAndSet(false, true)) {synchronized (startupShutdownLock) {LOG.info("Stopping history server.");try {netty.shutdown();} catch (Throwable t) {LOG.warn("Error while shutting down WebFrontendBootstrap.", t);}archiveFetcher.stop();try {LOG.info("Removing web dashboard root cache directory {}", webDir);FileUtils.deleteDirectory(webDir);} catch (Throwable t) {LOG.warn("Error while deleting web root directory {}", webDir, t);}LOG.info("Stopped 
history server.");// Remove shutdown hook to prevent resource leaksShutdownHookUtil.removeShutdownHook(shutdownHook, getClass().getSimpleName(), LOG);}}}// ------------------------------------------------------------------------// File generation// ------------------------------------------------------------------------static FileWriter createOrGetFile(File folder, String name) throws IOException {File file = new File(folder, name + ".json");if (!file.exists()) {Files.createFile(file.toPath());}FileWriter fr = new FileWriter(file);return fr;}private void createDashboardConfigFile() throws IOException {try (FileWriter fw = createOrGetFile(webDir, "config")) {fw.write(createConfigJson(DashboardConfiguration.from(webRefreshIntervalMillis, ZonedDateTime.now())));fw.flush();} catch (IOException ioe) {LOG.error("Failed to write config file.");throw ioe;}}private static String createConfigJson(DashboardConfiguration dashboardConfiguration) throws IOException {StringWriter writer = new StringWriter();JsonGenerator gen = JsonFactory.JACKSON_FACTORY.createGenerator(writer);gen.writeStartObject();gen.writeNumberField(DashboardConfiguration.FIELD_NAME_REFRESH_INTERVAL, dashboardConfiguration.getRefreshInterval());gen.writeNumberField(DashboardConfiguration.FIELD_NAME_TIMEZONE_OFFSET, dashboardConfiguration.getTimeZoneOffset());gen.writeStringField(DashboardConfiguration.FIELD_NAME_TIMEZONE_NAME, dashboardConfiguration.getTimeZoneName());gen.writeStringField(DashboardConfiguration.FIELD_NAME_FLINK_VERSION, dashboardConfiguration.getFlinkVersion());gen.writeStringField(DashboardConfiguration.FIELD_NAME_FLINK_REVISION, dashboardConfiguration.getFlinkRevision());gen.writeEndObject();gen.close();return writer.toString();}/*** Container for the {@link Path} and {@link FileSystem} of a refresh directory.*/static class RefreshLocation {private final Path path;private final FileSystem fs;private RefreshLocation(Path path, FileSystem fs) {this.path = path;this.fs = fs;}public Path 
getPath() {return path;}public FileSystem getFs() {return fs;}}
}
  • HistoryServer提供了finished jobs的相关查询功能;构造器从配置中读取historyserver.web.address、historyserver.web.port(默认8082)、historyserver.web.refresh-interval(默认10秒)、historyserver.web.tmpdir、historyserver.archive.fs.dir、historyserver.archive.fs.refresh-interval(默认10秒),然后创建了HistoryServerArchiveFetcher
  • 其run方法主要是调用start方法,该方法主要是启动HistoryServerArchiveFetcher,然后创建WebFrontendBootstrap
  • 构造器使用ShutdownHookUtil.addShutdownHook注册了ShutdownHook,在shutdown时执行stop方法,stop方法主要是调用WebFrontendBootstrap的shutdown方法以及HistoryServerArchiveFetcher的stop方法,然后清理webDir,移除shutdownHook

HistoryServerArchiveFetcher

flink-1.7.2/flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/history/HistoryServerArchiveFetcher.java

class HistoryServerArchiveFetcher {private static final Logger LOG = LoggerFactory.getLogger(HistoryServerArchiveFetcher.class);private static final JsonFactory jacksonFactory = new JsonFactory();private static final ObjectMapper mapper = new ObjectMapper();private final ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor(new ExecutorThreadFactory("Flink-HistoryServer-ArchiveFetcher"));private final JobArchiveFetcherTask fetcherTask;private final long refreshIntervalMillis;HistoryServerArchiveFetcher(long refreshIntervalMillis, List<HistoryServer.RefreshLocation> refreshDirs, File webDir, CountDownLatch numFinishedPolls) {this.refreshIntervalMillis = refreshIntervalMillis;this.fetcherTask = new JobArchiveFetcherTask(refreshDirs, webDir, numFinishedPolls);if (LOG.isInfoEnabled()) {for (HistoryServer.RefreshLocation refreshDir : refreshDirs) {LOG.info("Monitoring directory {} for archived jobs.", refreshDir.getPath());}}}void start() {executor.scheduleWithFixedDelay(fetcherTask, 0, refreshIntervalMillis, TimeUnit.MILLISECONDS);}void stop() {executor.shutdown();try {if (!executor.awaitTermination(1, TimeUnit.SECONDS)) {executor.shutdownNow();}} catch (InterruptedException ignored) {executor.shutdownNow();}}/*** {@link TimerTask} that polls the directories configured as {@link HistoryServerOptions#HISTORY_SERVER_ARCHIVE_DIRS} for* new job archives.*/static class JobArchiveFetcherTask extends TimerTask {private final List<HistoryServer.RefreshLocation> refreshDirs;private final CountDownLatch numFinishedPolls;/** Cache of all available jobs identified by their id. 
*/private final Set<String> cachedArchives;private final File webDir;private final File webJobDir;private final File webOverviewDir;private static final String JSON_FILE_ENDING = ".json";JobArchiveFetcherTask(List<HistoryServer.RefreshLocation> refreshDirs, File webDir, CountDownLatch numFinishedPolls) {this.refreshDirs = checkNotNull(refreshDirs);this.numFinishedPolls = numFinishedPolls;this.cachedArchives = new HashSet<>();this.webDir = checkNotNull(webDir);this.webJobDir = new File(webDir, "jobs");webJobDir.mkdir();this.webOverviewDir = new File(webDir, "overviews");webOverviewDir.mkdir();}@Overridepublic void run() {try {for (HistoryServer.RefreshLocation refreshLocation : refreshDirs) {Path refreshDir = refreshLocation.getPath();FileSystem refreshFS = refreshLocation.getFs();// contents of /:refreshDirFileStatus[] jobArchives;try {jobArchives = refreshFS.listStatus(refreshDir);} catch (IOException e) {LOG.error("Failed to access job archive location for path {}.", refreshDir, e);continue;}if (jobArchives == null) {continue;}boolean updateOverview = false;for (FileStatus jobArchive : jobArchives) {Path jobArchivePath = jobArchive.getPath();String jobID = jobArchivePath.getName();try {JobID.fromHexString(jobID);} catch (IllegalArgumentException iae) {LOG.debug("Archive directory {} contained file with unexpected name {}. 
Ignoring file.",refreshDir, jobID, iae);continue;}if (cachedArchives.add(jobID)) {try {for (ArchivedJson archive : FsJobArchivist.getArchivedJsons(jobArchive.getPath())) {String path = archive.getPath();String json = archive.getJson();File target;if (path.equals(JobsOverviewHeaders.URL)) {target = new File(webOverviewDir, jobID + JSON_FILE_ENDING);} else if (path.equals("/joboverview")) { // legacy pathjson = convertLegacyJobOverview(json);target = new File(webOverviewDir, jobID + JSON_FILE_ENDING);} else {target = new File(webDir, path + JSON_FILE_ENDING);}java.nio.file.Path parent = target.getParentFile().toPath();try {Files.createDirectories(parent);} catch (FileAlreadyExistsException ignored) {// there may be left-over directories from the previous attempt}java.nio.file.Path targetPath = target.toPath();// We overwrite existing files since this may be another attempt at fetching this archive.// Existing files may be incomplete/corrupt.Files.deleteIfExists(targetPath);Files.createFile(target.toPath());try (FileWriter fw = new FileWriter(target)) {fw.write(json);fw.flush();}}updateOverview = true;} catch (IOException e) {LOG.error("Failure while fetching/processing job archive for job {}.", jobID, e);// Make sure we attempt to fetch the archive againcachedArchives.remove(jobID);// Make sure we do not include this job in the overviewtry {Files.delete(new File(webOverviewDir, jobID + JSON_FILE_ENDING).toPath());} catch (IOException ioe) {LOG.debug("Could not delete file from overview directory.", ioe);}// Clean up job files we may have createdFile jobDirectory = new File(webJobDir, jobID);try {FileUtils.deleteDirectory(jobDirectory);} catch (IOException ioe) {LOG.debug("Could not clean up job directory.", ioe);}}}}if (updateOverview) {updateJobOverview(webOverviewDir, webDir);}}} catch (Exception e) {LOG.error("Critical failure while fetching/processing job archives.", e);}numFinishedPolls.countDown();}}private static String convertLegacyJobOverview(String 
legacyOverview) throws IOException {JsonNode root = mapper.readTree(legacyOverview);JsonNode finishedJobs = root.get("finished");JsonNode job = finishedJobs.get(0);JobID jobId = JobID.fromHexString(job.get("jid").asText());String name = job.get("name").asText();JobStatus state = JobStatus.valueOf(job.get("state").asText());long startTime = job.get("start-time").asLong();long endTime = job.get("end-time").asLong();long duration = job.get("duration").asLong();long lastMod = job.get("last-modification").asLong();JsonNode tasks = job.get("tasks");int numTasks = tasks.get("total").asInt();int pending = tasks.get("pending").asInt();int running = tasks.get("running").asInt();int finished = tasks.get("finished").asInt();int canceling = tasks.get("canceling").asInt();int canceled = tasks.get("canceled").asInt();int failed = tasks.get("failed").asInt();int[] tasksPerState = new int[ExecutionState.values().length];// pending is a mix of CREATED/SCHEDULED/DEPLOYING// to maintain the correct number of task states we have to pick one of themtasksPerState[ExecutionState.SCHEDULED.ordinal()] = pending;tasksPerState[ExecutionState.RUNNING.ordinal()] = running;tasksPerState[ExecutionState.FINISHED.ordinal()] = finished;tasksPerState[ExecutionState.CANCELING.ordinal()] = canceling;tasksPerState[ExecutionState.CANCELED.ordinal()] = canceled;tasksPerState[ExecutionState.FAILED.ordinal()] = failed;JobDetails jobDetails = new JobDetails(jobId, name, startTime, endTime, duration, state, lastMod, tasksPerState, numTasks);MultipleJobsDetails multipleJobsDetails = new MultipleJobsDetails(Collections.singleton(jobDetails));StringWriter sw = new StringWriter();mapper.writeValue(sw, multipleJobsDetails);return sw.toString();}/*** This method replicates the JSON response that would be given by the JobsOverviewHandler when* listing both running and finished jobs.** <p>Every job archive contains a joboverview.json file containing the same structure. 
Since jobs are archived on* their own however the list of finished jobs only contains a single job.** <p>For the display in the HistoryServer WebFrontend we have to combine these overviews.*/private static void updateJobOverview(File webOverviewDir, File webDir) {try (JsonGenerator gen = jacksonFactory.createGenerator(HistoryServer.createOrGetFile(webDir, JobsOverviewHeaders.URL))) {File[] overviews = new File(webOverviewDir.getPath()).listFiles();if (overviews != null) {Collection<JobDetails> allJobs = new ArrayList<>(overviews.length);for (File overview : overviews) {MultipleJobsDetails subJobs = mapper.readValue(overview, MultipleJobsDetails.class);allJobs.addAll(subJobs.getJobs());}mapper.writeValue(gen, new MultipleJobsDetails(allJobs));}} catch (IOException ioe) {LOG.error("Failed to update job overview.", ioe);}}
}
  • HistoryServerArchiveFetcher主要是以historyserver.archive.fs.refresh-interval的时间间隔从historyserver.archive.fs.dir目录拉取job archives;它内部创建了JobArchiveFetcherTask来执行这个任务
  • JobArchiveFetcherTask继承了jdk的TimerTask,其run方法就是遍历refreshDirs,然后执行FileSystem.listStatus,然后使用FsJobArchivist.getArchivedJsons获取ArchivedJson根据不同path写入到指定文件
  • 如果path是/jobs/overview,则写入webDir/overviews/jobID.json文件;如果path是/joboverview,则先调用convertLegacyJobOverview转换json,然后再写入webDir/overviews/jobID.json文件;其他的path则写入webDir/path.json文件

WebFrontendBootstrap

flink-1.7.2/flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/utils/WebFrontendBootstrap.java

public class WebFrontendBootstrap {private final Router router;private final Logger log;private final File uploadDir;private final ServerBootstrap bootstrap;private final Channel serverChannel;private final String restAddress;public WebFrontendBootstrap(Router router,Logger log,File directory,@Nullable SSLHandlerFactory serverSSLFactory,String configuredAddress,int configuredPort,final Configuration config) throws InterruptedException, UnknownHostException {this.router = Preconditions.checkNotNull(router);this.log = Preconditions.checkNotNull(log);this.uploadDir = directory;ChannelInitializer<SocketChannel> initializer = new ChannelInitializer<SocketChannel>() {@Overrideprotected void initChannel(SocketChannel ch) {RouterHandler handler = new RouterHandler(WebFrontendBootstrap.this.router, new HashMap<>());// SSL should be the first handler in the pipelineif (serverSSLFactory != null) {ch.pipeline().addLast("ssl", serverSSLFactory.createNettySSLHandler());}ch.pipeline().addLast(new HttpServerCodec()).addLast(new ChunkedWriteHandler()).addLast(new HttpRequestHandler(uploadDir)).addLast(handler.getName(), handler).addLast(new PipelineErrorHandler(WebFrontendBootstrap.this.log));}};NioEventLoopGroup bossGroup   = new NioEventLoopGroup(1);NioEventLoopGroup workerGroup = new NioEventLoopGroup();this.bootstrap = new ServerBootstrap();this.bootstrap.group(bossGroup, workerGroup).channel(NioServerSocketChannel.class).childHandler(initializer);ChannelFuture ch;if (configuredAddress == null) {ch = this.bootstrap.bind(configuredPort);} else {ch = this.bootstrap.bind(configuredAddress, configuredPort);}this.serverChannel = ch.sync().channel();InetSocketAddress bindAddress = (InetSocketAddress) serverChannel.localAddress();InetAddress inetAddress = bindAddress.getAddress();final String address;if (inetAddress.isAnyLocalAddress()) {address = config.getString(JobManagerOptions.ADDRESS, InetAddress.getLocalHost().getHostName());} else {address = inetAddress.getHostAddress();}int 
port = bindAddress.getPort();this.log.info("Web frontend listening at {}" + ':' + "{}", address, port);final String protocol = serverSSLFactory != null ? "https://" : "http://";this.restAddress = protocol + address + ':' + port;}public ServerBootstrap getBootstrap() {return bootstrap;}public int getServerPort() {Channel server = this.serverChannel;if (server != null) {try {return ((InetSocketAddress) server.localAddress()).getPort();}catch (Exception e) {log.error("Cannot access local server port", e);}}return -1;}public String getRestAddress() {return restAddress;}public void shutdown() {if (this.serverChannel != null) {this.serverChannel.close().awaitUninterruptibly();}if (bootstrap != null) {if (bootstrap.group() != null) {bootstrap.group().shutdownGracefully();}if (bootstrap.childGroup() != null) {bootstrap.childGroup().shutdownGracefully();}}}
}
  • WebFrontendBootstrap使用netty启动了一个http server,其pipeline有HttpServerCodec、ChunkedWriteHandler、HttpRequestHandler、RouterHandler、PipelineErrorHandler;其中这里的RouterHandler的Router有个GET的route,其使用的是HistoryServerStaticFileServerHandler,用于给HistoryServer提供静态文件服务

小结

  • HistoryServer提供了finished jobs的相关查询功能;其主要由HistoryServerArchiveFetcher以及WebFrontendBootstrap两部分组成;其run方法主要是调用start方法,该方法主要是启动HistoryServerArchiveFetcher,然后创建WebFrontendBootstrap
  • HistoryServerArchiveFetcher主要是以historyserver.archive.fs.refresh-interval的时间间隔从historyserver.archive.fs.dir目录拉取job archives;它内部创建了JobArchiveFetcherTask来执行这个任务;JobArchiveFetcherTask继承了jdk的TimerTask,其run方法就是遍历refreshDirs,然后执行FileSystem.listStatus,然后使用FsJobArchivist.getArchivedJsons获取ArchivedJson根据不同path写入到指定文件
  • WebFrontendBootstrap使用netty启动了一个http server,其pipeline有HttpServerCodec、ChunkedWriteHandler、HttpRequestHandler、RouterHandler、PipelineErrorHandler;其中这里的RouterHandler的Router有个GET的route,其使用的是HistoryServerStaticFileServerHandler,用于给HistoryServer提供静态文件服务

doc

  • HistoryServer

转载于:https://my.oschina.net/go4it/blog/3020242

相关文章:

深度学习开源库tiny-dnn的使用(MNIST)

tiny-dnn是一个基于DNN的深度学习开源库，它的License是BSD 3-Clause。之前名字是tiny-cnn是基于CNN的，tiny-dnn与tiny-cnn相关又增加了些新层。此开源库很活跃，几乎每天都有新的提交，因此下面详细介绍下tiny-dnn在windows7 64bit …

如何学习SVM?怎么改进实现SVM算法程序?答案来了

编辑 | 忆臻来源 | 深度学习这件小事（ID：DL_NLP）【导读】在 3D 动作识别领域，需要用到 SVM（支持向量机算法），但是现在所知道的 SVM 算法很多很乱，相关的程序包也很多，有什…

跟着石头哥哥学cocos2d-x(三)---2dx引擎中的内存管理模型

2019独角兽企业重金招聘Python工程师标准>>> 2dx引擎中的对象内存管理模型，很简单就是一个对象池引用计数，本着学好2dx的好奇心，先这里开走吧，紧接上面两节，首先我们看一个编码场景代码： hello…

读8篇论文,梳理BERT相关模型进展与反思

作者 | 陈永强来源 | 微软研究院AI头条&#xff08;ID:MSRAsia)【导读】BERT 自从在 arXiv 上发表以来获得了很大的成功和关注&#xff0c;打开了 NLP 中 2-Stage 的潘多拉魔盒。随后涌现了一大批类似于“BERT”的预训练&#xff08;pre-trained&#xff09;模型&#xff0c;有…

Dlib库中实现正脸人脸检测的测试代码

Dlib库中提供了正脸人脸检测的接口&#xff0c;这里参考dlib/examples/face_detection_ex.cpp中的代码&#xff0c;通过调用Dlib中的接口&#xff0c;实现正脸人脸检测的测试代码&#xff0c;测试代码如下&#xff1a;#include "funset.hpp" #include <string>…

20189317 《网络攻防技术》 第二周作业

一.黑客信息 &#xff08;1&#xff09;国外黑客 1971年&#xff0c;卡普尔从耶鲁大学毕业。在校期间&#xff0c;他专修心理学、语言学以及计算机学科。也就是在这时他开始对计算机萌生兴趣。他继续到研究生院深造。20世纪60年代&#xff0c;退学是许多人的一个选择。只靠知识…

centos 6.4 SVN服务器多个项目的权限分组管理

根据本博客中的cent OS 6.4下的SVN服务器构建 一文&#xff0c;搭建好SVN服务器只能管理一个工程&#xff0c;如何做到不同的项目&#xff0c;多个成员的权限管理分配呢&#xff1f;一 需求开发服务器搭建好SVN服务器&#xff0c;不可能只管理一个工程项目&#xff0c;如何做到…

cifar数据集介绍及到图像转换的实现

CIFAR是一个用于普通物体识别的数据集。CIFAR数据集分为两种&#xff1a;CIFAR-10和CIFAR-100。The CIFAR-10 and CIFAR-100 are labeled subsets of the 80 million tiny images dataset. They were collected by Alex Krizhevsky, Vinod Nair, and Geoffrey Hinton.CIFAR-10由…

取代Python?Rust凭什么

作者 | Nathan J. Goldbaum译者 | 弯月&#xff0c;责编 | 屠敏来源 | CSDN&#xff08;ID&#xff1a;CSDNnews&#xff09;【导语】Rust 也能实现神经网络&#xff1f;在前一篇帖子中&#xff0c;作者介绍了MNIST数据集以及分辨手写数字的问题。在这篇文章中&#xff0c;他将…

【Mac】解决「无法将 chromedriver 移动到 /usr/bin 目录下」问题

问题描述 在搭建 Selenium 库 ChromeDriver 爬虫环境时&#xff0c;遇到了无法将 chromedriver 移动到 /usr/bin 目录下的问题&#xff0c;如下图&#xff1a; 一查原来是因为系统有一个 System Integrity Protection (SIP) 系统完整性保护&#xff0c;如果此功能不关闭&#…

【译文】怎样让一天有36个小时

作者&#xff1a;Jon Bischke原文地址&#xff1a;How to Have a 36 Hour Day 你经常听人说“真希望一天能多几个小时”或者类似的话吗&#xff1f;当然&#xff0c;现实中我们每天只有24小时。这么说吧&#xff0c;人和人怎样度过这24个小时是完全不同的。到现在这样的说法已经…

Dlib库中实现正脸人脸关键点(landmark)检测的测试代码

Dlib库中提供了正脸人脸关键点检测的接口&#xff0c;这里参考dlib/examples/face_landmark_detection_ex.cpp中的代码&#xff0c;通过调用Dlib中的接口&#xff0c;实现正脸人脸关键点检测的测试代码&#xff0c;测试代码如下&#xff1a;/* reference: dlib/examples/face_l…

LeetCode--004--寻找两个有序数组的中位数(java)

转自https://blog.csdn.net/chen_xinjia/article/details/69258706 其中&#xff0c;N14,N26,size4610. 1&#xff0c;现在有的是两个已经排好序的数组&#xff0c;结果是要找出这两个数组中间的数值&#xff0c;如果两个数组的元素个数为偶数&#xff0c;则输出的是中间两个元…

开源sk-dist,超参数调优仅需3.4秒,sk-learn训练速度提升100倍

作者 | Evan Harris译者 | Monanfei编辑 | Jane 出品 | AI科技大本营&#xff08;ID&#xff1a;rgznai100)【导语】这篇文章为大家介绍了一个开源项目——sk-dist。在一台没有并行化的单机上进行超参数调优&#xff0c;需要 7.2 分钟&#xff0c;而在一百多个核心的 Spark 群集…

Windows和Linux下通用的线程接口

对于多线程开发&#xff0c;Linux下有pthread线程库&#xff0c;使用起来比较方便&#xff0c;而Windows没有&#xff0c;对于涉及到多线程的跨平台代码开发&#xff0c;会带来不便。这里参考网络上的一些文章&#xff0c;整理了在Windows和Linux下通用的线程接口。经过测试&am…

MySQL 性能调优的10个方法

MYSQL 应该是最流行了 WEB 后端数据库。WEB 开发语言最近发展很快&#xff0c;PHP&#xff0c; Ruby, Python, Java 各有特点&#xff0c;虽然 NOSQL 最近越來越多的被提到&#xff0c;但是相信大部分架构师还是会选择 MYSQL 来做数据存储。MYSQL 如此方便和稳定&#xff0c;以…

他们用卷积神经网络,发现了名画中隐藏的秘密

作者 | 神经小刀来源 |HyperAI超神经&#xff08; ID: HyperAI&#xff09;导语&#xff1a;著名的艺术珍品《根特祭坛画》&#xff0c;正在进行浩大的修复工作&#xff0c;以保证现在的人们能感受到这幅伟大的巨制&#xff0c;散发出的灿烂光芒。而随着技术的进步&#xff0c;…

机器学习公开课~~~~mooc

https://class.coursera.org/ntumlone-001/class/index

DLM:微信大规模分布式n-gram语言模型系统

来源 | 微信后台团队Wechat & NUS《A Distributed System for Large-scale n-gram Language Models at Tencent》分布式语言模型&#xff0c;支持大型n-gram LM解码的系统。本文是对原VLDB2019论文的简要翻译。摘要n-gram语言模型广泛用于语言处理&#xff0c;例如自动语音…

Ubuntu14.04 64位机上安装cuda8.0+cudnn5.0操作步骤

查看Ubuntu14.04 64位上显卡信息&#xff0c;执行&#xff1a;lspci | grep -i vga lspci -v -s 01:00.0 nvidia-smi第一条此命令可以显示一些显卡的相关信息&#xff1b;如果想查看某个详细信息&#xff0c;可以执行第二条命令;如果是NVIDIA卡&#xff0c; 可继续执行第三条命…

SQLI DUMB SERIES-5

less5 &#xff08;1&#xff09;输入单引号&#xff0c;回显错误&#xff0c;说明存在注入点。输入的Id被一对单引号所包围&#xff0c;可以闭合单引号 &#xff08;2&#xff09;输入正常时&#xff1a;?id1 说明没有显示位&#xff0c;因此不能使用联合查询了&#xff1b;可…

javascript RegExp

http://www.w3schools.com/jsref/jsref_obj_regexp.asp声明-------------modifiers&#xff1a;{i,g,m}1. var pattnew RegExp(pattern,modifiers);2. var patt/pattern/modifiers;------------------------例子&#xff1a;var str "Visit W3Schools"; //两…

Ubuntu14.04 64位机上安装OpenCV2.4.13(CUDA8.0)版操作步骤

Ubuntu14.04 64位机上安装CUDA8.0的操作步骤可以参考http://blog.csdn.net/fengbingchun/article/details/53840684&#xff0c;这里是在已经正确安装了CUDA8.0的基础上安装OpenCV2.4.13(CUDA8.0)操作步骤&#xff1a;1. 从http://opencv.org/downloads.html 下载OpenCV2.…

一篇文章能够看懂基础代码之CSS

web页面主要分为三块内容&#xff1a;js&#xff1a;控制用户行为和执行代码行为html元素&#xff1a;控制页面显示哪些控件&#xff08;例如按钮&#xff0c;输入框&#xff0c;文本等&#xff09;css&#xff1a;控制如何显示页面上的空间&#xff0c;例如布局&#xff0c;颜…

谷歌NIPS论文Transformer模型解读:只要Attention就够了

作者 | Sherwin Chen译者 | Major&#xff0c;编辑 | 夕颜出品 | AI科技大本营&#xff08;ID:rgznai100&#xff09;导读&#xff1a;在 NIPS 2017 上&#xff0c;谷歌的 Vaswani 等人提出了 Transformer 模型。它利用自我注意&#xff08;self-attention&#xff09;来计算其…

中国移动与苹果联姻 三星在华霸主地位或遭取代

据国外媒体12月24日报道&#xff0c;在各方的期待下&#xff0c;苹果终于宣布中国移动将于2014年1月17日开始销售支持其网络的iPhone手机。而中国移动也将于12 月25日开始正式接受预定。作为中国以及世界最大的移动运营商&#xff0c;中国移动与苹果的合作&#xff0c;将会帮助…

二维码Data Matrix编码、解码使用举例

二维码Data Matrix的介绍见&#xff1a; http://blog.csdn.net/fengbingchun/article/details/44279967 &#xff0c;这里简单写了个生成二维码和对二维码进行识别的测试例子&#xff0c;如下&#xff1a;int test_data_matrix_encode() {std::string str "中国_abc_DEF…

PDF文件如何转成markdown格式

百度上根据pdf转makrdown为关键字进行搜索&#xff0c;结果大多数是反过来的转换&#xff0c;即markdown文本转PDF格式。 但是PDF转markdown的解决方案很少。 正好我工作上有这个需求&#xff0c;所以自己实现了一个解决方案。 下图是一个用PDF XChange Editor打开的PDF文件&am…

关于SAP BW提示“Carry out repairs in non-original only

为什么80%的码农都做不了架构师？>>> 这个提示是由于你在生产系统（正式系统）里面修改了一些东西，才提示"Carry out repairs in non-original system only if they are urgent"这个警告，理论上我们…

windows7 64位机上安装配置CUDA7.5(或8.0)+cudnn5.0操作步骤

按照官网文档 http://docs.nvidia.com/cuda/cuda-installation-guide-microsoft-windows/index.html#axzz4TpI4c8vf 进行安装&#xff1a;在windows7上安装cuda8.0/cuda7.5的系统需求&#xff1a;(1)、ACUDA-capable GPU(本机显卡为GeForce GT 640M)&#xff1b;(2)、A support…