Upgrade pax-url-aether version to 2.4.7 for karaf 4.0.3

Upgrade pax-url-aether version to 2.4.7 for karaf 4.0.3

talend-esb/pom.xml

  • add version variable
    <pax-url-aether.version>2.4.7</pax-url-aether.version>
  • add dependency
<!-- workaround for pax-url-aether bundle (TESB-17522) -->
<dependency>
    <groupId>org.ops4j.pax.url</groupId>
    <artifactId>pax-url-aether</artifactId>
    <version>${pax-url-aether.version}</version>
    <type>jar</type>
</dependency>
  • add copy jar in generate-resources phase
<artifactItem>
    <groupId>org.ops4j.pax.url</groupId>
    <artifactId>pax-url-aether</artifactId>
    <version>${pax-url-aether.version}</version>
    <type>jar</type>
    <outputDirectory>target/dependencies</outputDirectory>
    <destFileName>pax-url-aether-${pax-url-aether.version}.jar</destFileName>
</artifactItem>
  • add replace version in generate-resources phase for framework-${karaf.version}-features.xml file
<!-- patch for Karaf org.apache.karaf.features.core bundle -->
<replace file="target/dependencies/framework-${karaf.version}-features.xml">
    <replacefilter
        token="org.apache.karaf.features/org.apache.karaf.features.core/${karaf.version}&lt;"
        value="org.apache.karaf.features/org.apache.karaf.features.core/${karaf.version}/jar/TESB&lt;"/>
    <!-- patch for upgrading pax-url-aether to 2.4.7 (TESB-17522) -->
    <replacefilter
        token="mvn:org.ops4j.pax.url/pax-url-aether/2.4.3"
        value="mvn:org.ops4j.pax.url/pax-url-aether/${pax-url-aether.version}"/>
</replace>
  • add repo dependencies in pom
<repository>
    <id>ops4j.sonatype.releases</id>
    <name>OPS4J releases repository</name>
    <url>https://oss.sonatype.org/content/repositories/ops4j-releases/</url>
    <releases>
        <enabled>true</enabled>
    </releases>
    <snapshots>
        <enabled>false</enabled>
    </snapshots>
</repository>

talend-esb/src/main/descriptors/unix-bin.xml and talend-esb/src/main/descriptors/win-bin.xml

  • exclude startup.properties file <exclude>etc/startup.properties</exclude>
  • copy jar into local repo
<!-- pax-url-aether library patch (TESB-17522) -->
<file>
    <source>${basedir}/target/dependencies/pax-url-aether-${pax-url-aether.version}.jar</source>
    <outputDirectory>/container/system/org/ops4j/pax/url/pax-url-aether/${pax-url-aether.version}/</outputDirectory>
    <fileMode>0644</fileMode>
</file>    

talend-esb/src/main/filtered-resources/etc/org.ops4j.pax.url.mvn.cfg

  • add always param in the cfg file for maven
    org.ops4j.pax.url.mvn.globalUpdatePolicy=always

talend-esb/src/main/filtered-resources/etc/startup.properties

  • add mvn\:org.ops4j.pax.url/pax-url-aether/2.4.7 = 5 in this file

https://github.com/Talend/tesb-rt-se/commit/a72e35789b1d3a33d8b97b641d25f4df8b0ab8ea

Spring Bean's Scope

Bean scopes

  • @Service
  • @Scope(ConfigurableBeanFactory.SCOPE_PROTOTYPE)
Scope — Description

singleton

Scopes a single bean definition to a single object instance per Spring IoC container.

prototype

Scopes a single bean definition to any number of object instances.

request

Scopes a single bean definition to the lifecycle of a single HTTP request; that is, each and every HTTP request will have its own instance of a bean created off the back of a single bean definition. Only valid in the context of a web-aware Spring ApplicationContext.

session

Scopes a single bean definition to the lifecycle of a HTTP Session. Only valid in the context of a web-aware Spring ApplicationContext.

global session

Scopes a single bean definition to the lifecycle of a global HTTP Session. Typically only valid when used in a portlet context. Only valid in the context of a web-aware Spring ApplicationContext.

import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Service;

@Service
@Scope("prototype")
/**
 * Demo bean registered with prototype scope: the Spring container hands out
 * a brand-new instance on every lookup instead of caching a singleton.
 */
public class DemoPrototypeService {

    /** Explicit no-arg constructor (same as the implicit default). */
    public DemoPrototypeService() { }
}
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.Resource;
import org.springframework.stereotype.Service;

/**
 * Created by yyi on 8/8/2016. Bean
 */
@Service
/**
 * Demo bean exposing a classpath {@link Resource} injected via {@code @Value},
 * so other beans can reference it through SpEL (e.g. {@code #{demoService.another}}).
 * A plain string literal could be injected the same way instead of a resource path.
 */
public class DemoService {

    // Injected from the classpath; the file is assumed to exist at this
    // location — TODO confirm ch2/el/test.txt is on the runtime classpath.
    @Value("classpath:/ch2/el/test.txt")
    private Resource another;

    public void setAnother(Resource another) {
        this.another = another;
    }

    public Resource getAnother() {
        return another;
    }
}
import org.apache.commons.io.IOUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
import org.springframework.core.env.Environment;
import org.springframework.core.io.Resource;

/**
 * Created by yyi on 8/8/2016.
 */
@Configuration
@ComponentScan("ch2.el")
@PropertySource("classpath:ch2/el/test.properties")
/**
 * Demo configuration showing the different value-injection styles supported
 * by {@code @Value}: plain literals, SpEL expressions, resources, URLs and
 * property placeholders backed by the {@code @PropertySource} above.
 */
public class ElConfig {
    // 1. Plain string literal injected as-is.
    @Value("I Love You!")
    private String normal;

    // 2. SpEL: read a JVM system property.
    @Value("#{systemProperties['os.name']}")
    private String osName;

    // 3. SpEL: static method call via T() — a random number in [0, 100).
    @Value("#{ T(java.lang.Math).random() * 100.0 }")
    private double randomNumber;

    // 4. SpEL: property of another bean (DemoService.getAnother()).
    @Value("#{demoService.another}")
    private Resource fromAnother;

    // 5. Classpath resource — assumes ch2/el/test.txt exists; TODO confirm.
    @Value("classpath:/ch2/el/test.txt")
    private Resource testFile;

    // 6. URL resource; reading it performs network I/O at outputResource() time.
    @Value("http://www.baidu.com")
    private Resource testUrl;

    // 7. Placeholder resolved from test.properties via the configurer below.
    @Value("${company.team}")
    private String bookName;

    // Environment gives programmatic access to the same property sources.
    @Autowired
    private Environment environment;

    // Required (static) for ${...} placeholders in @Value to be resolved.
    @Bean
    public static PropertySourcesPlaceholderConfigurer propertyConfigure() {
        return new PropertySourcesPlaceholderConfigurer();
    }

    /**
     * Prints every injected value to stdout; demo-only, so any exception
     * (e.g. I/O failure reading the file or URL) is just stack-traced.
     */
    public void outputResource() {
        try {
            System.out.println(normal);
            System.out.println(osName);
            System.out.println(randomNumber);
            System.out.println(fromAnother);
            System.out.println(IOUtils.toString(fromAnother.getInputStream()));

            System.out.println(IOUtils.toString(testFile.getInputStream()));
            System.out.println(IOUtils.toString(testUrl.getInputStream()));
            System.out.println(bookName);
            System.out.println(environment.getProperty("book.author"));
        } catch (Exception e) {
            e.printStackTrace();
        }

    }

}

Spring AOP

Maven 依赖

    <dependency>
        <groupId>org.aspectj</groupId>
        <artifactId>aspectjrt</artifactId>
        <version>1.8.5</version>
    </dependency>
    <dependency>
        <groupId>org.aspectj</groupId>
        <artifactId>aspectjweaver</artifactId>
        <version>1.8.5</version>
    </dependency>

  • AspectJ @Before
  • AspectJ @After
  • AspectJ @AfterReturning
  • AspectJ @Around

Introduction to the Build Lifecycle(Maven运行周期)

翻译自:Introduction to the Build Lifecycle

Table Of Contents

Build Lifecycle Basics
Setting Up Your Project to Use the Build Lifecycle
    Packaging
    Plugins
Lifecycle Reference
Built-in Lifecycle Bindings

大纲
基本概念
如何配置工程
打包
插件
详细内容
运行周期包含步骤

Build Lifecycle Basics

Maven is based around the central concept of a build lifecycle. What this means is that the process for building and distributing a particular artifact (project) is clearly defined.

For the person building a project, this means that it is only necessary to learn a small set of commands to build any Maven project, and the POM will ensure they get the results they desired.

There are three built-in build lifecycles: default, clean and site. The default lifecycle handles your project deployment, the clean lifecycle handles project cleaning, while the site lifecycle handles the creation of your project's site documentation.

基本概念
Maven是围绕运行周期来工作的,这样做的意义就是要明确界定出项目的生产和部署等不同处理过程。
我们只需要了解其中几个简单的Maven命令就可以通过POM文件去构建需要的工程了。
运行周期包含了三个基本步骤:缺省(default)、清理(clean)和站点(site)。缺省步骤负责部署项目,清理步骤用于项目的清理过程,使用站点步骤可以产生项目的文档和一些后续任务。

A Build Lifecycle is Made Up of Phases
一个构建周期包含的阶段

Each of these build lifecycles is defined by a different list of build phases, wherein a build phase represents a stage in the lifecycle.
每个构建周期是包含了不同的构建步骤序列,序列中的每一个构建步骤就是生命周期中的一个过程。

For example, the default lifecycle comprises of the following phases (for a complete list of the lifecycle phases, refer to the Lifecycle Reference):

validate - validate the project is correct and all necessary information is available
compile - compile the source code of the project
test - test the compiled source code using a suitable unit testing framework. These tests should not require the code be packaged or deployed
package - take the compiled code and package it in its distributable format, such as a JAR.
verify - run any checks on results of integration tests to ensure quality criteria are met
install - install the package into the local repository, for use as a dependency in other projects locally
deploy - done in the build environment, copies the final package to the remote repository for sharing with other developers and projects.

例如缺省模式就包含如下几个步骤(点击查看完整的构建步骤生命周期):
确认(validate) - 验证项目配置正确,所需信息完整可用
编译(compile) - 编译项目的源代码
测试(test) - 使用合适的单元测试框架测试已编译的源代码,这些测试不应要求代码已被打包或部署
打包(package) - 将编译后的代码打包成可发布的格式,例如 JAR
验证(verify) - 对集成测试的结果进行检查,确保满足质量标准
安装(install) - 将打包结果安装到本地仓库,供本地其他项目作为依赖使用
部署(deploy) - 在构建环境中完成,将最终的包复制到远程仓库,与其他开发者和项目共享

These lifecycle phases (plus the other lifecycle phases not shown here) are executed sequentially to complete the default lifecycle. Given the lifecycle phases above, this means that when the default lifecycle is used, Maven will first validate the project, then will try to compile the sources, run those against the tests, package the binaries (e.g. jar), run integration tests against that package, verify the integration tests, install the verified package to the local repository, then deploy the installed package to a remote repository.

第1章 绪论 (1.3 研究目的与内容)

1.3 研究目的与内容
1.3.1 研究目的
论文主要研究目的是设计并实现具有多维度数据分析展现功能的日志分析软件平台,其中中间媒介数据载体(Intermediate Database)使用的是基于Hadoop的分布式文件系统(Hadoop Distributed File System,简称HDFS),使用HDFS目的主要是用来提高日志分析中日志记录分组的计算效率,解决对大量日志数据的查询与管理问题。该日志分析软件平台可以满足对日常大量日志数据进行的模糊匹配分析工作,可实现文本日志数据导入;日志数据分析、归类;分析结果报表可视化的功能。
在需求分析阶段,主要分析了当前业务中使用的开源数据质量产品的主要功能点、用途,充分了解各个工具的工作的运行原理,提取重要功能进行梳理,并对数据处理的性能进行了分析。在当前业务处理过程中数据集成系统会产生大量日志文件,日志分析人员需要对不同类型日志进行分类处理,并需要提取日志中有用信息制作数据分析报表。
目前工作中对日志文件的管理和查询管理工作目前主要依赖开源数据分析软件完成,有诸多不便之处,本次开发的日志分析平台目标主要用于解决以下在日常数据处理过程中遇到的几个方面的问题:
(1) 将开源数据分析技术集成到同一平台完成日志数据分析
(2) 实现基于Map/Reduce的分组算法,提高对日志数据的分析效率
(3) 提高历史日志数据查询速度
(4) 将通用分组算法做成可配置模式,在数据分析过程中可配置不同分组算法
(5) 解决历史日志数据管理与存储问题
论文中设计的日志分析平台主要用于对日常日志数据进行高效处理,其中包括:使用模糊分组算法按关键词对日志文本数据进行分组、可视化展现分析结果。

1.3.2 主要研究内容
论文的研究内容主要包含在以下三个方面:基于RAP技术的日志分析平台用户界面设计与开发;对数据进行数据抽取、过滤、导入,完成基于常用数据匹配算法的Map/Reduce框架实现,以针对HDFS中数据的通用查询与分析功能;并实现以图表形式展现日志分析挖掘结果。具体技术方案如下:
(1) 基于Eclipse RAP(Remote Application Platform)框架开发日志分析软件前端界面
Eclipse RAP框架提供了丰富的网页前端控件,并集成OSGi(Open Service Gateway Initiative)与Java EE(Java Platform Enterprise Edition),本次开发的日志分析平台框架部分主要采用RAP开源技术结合Eclipse Plug-ins插件技术开发应用程序界面,利用插件开发技术将业务应用组件模块化,分别开发前端用户界面与后台数据处理功能模块,再通过RAP平台中的OSGi将系统整合起来,从而降低数据处理功能与前端用户界面的耦合性。前端用户界面主要功能包括:数据库连接控制视图,数据集成视图,数据检索视图,算法配置页面等。
(2) 研究HDFS分布式文件系统作为日志分析软件中间媒介数据载体的数据存储方式
Hadoop实现了一个分布式文件系统,简称HDFS。HDFS有着高容错性的数据处理优势,并且其设计架构允许将其部署在配置较低的硬件平台上。而且它提供高传输率来访问应用程序的数据。Hadoop的功能特点适合本次论文中所需的对于大量日志数据进行高效分析的需求,所以论文中设计到的数据处理技术采用基于Hadoop 分布式系统基础架构作为中间媒介数据载体并利用Map/Reduce框架实现数据分组功能(图1-2)。

ddd.png
图1-2 通过Map/Reduce框架处理HDFS
Figure 1-2 Map/Reduce in HDFS

(3) 多维数据可视化技术
基于联机分析处理技术设计多维数据模型用于存储日志分析结果,使用SpagoBI、JProvit等开源数据可视化技术组件实现对日志分析结果的可视化展现。